ZergRushA

rt-ai-5 (done)

Oct 20th, 2022 (edited)
import math

# Task 1: Taylor-series approximations of trigonometric functions

class AdvancedCalculus:

    def my_sin(self, theta):
        # Reduce the argument to [-pi, pi) so the series converges quickly
        theta = math.fmod(theta + math.pi, 2 * math.pi) - math.pi
        result = 0
        termsign = 1
        power = 1

        # sin(x) = x - x^3/3! + x^5/5! - ... (first 10 terms)
        for i in range(10):
            result += (math.pow(theta, power) / math.factorial(power)) * termsign
            termsign *= -1
            power += 2
        return result

    def my_cos(self, theta):
        # Reduce the argument to [-pi, pi)
        theta = math.fmod(theta + math.pi, 2 * math.pi) - math.pi
        result = 0
        termsign = 1
        power = 0

        # cos(x) = 1 - x^2/2! + x^4/4! - ... (first 10 terms)
        for i in range(10):
            result += (math.pow(theta, power) / math.factorial(power)) * termsign
            termsign *= -1
            power += 2

        return result

    def my_tan(self, theta):
        return self.my_sin(theta) / self.my_cos(theta)

    @staticmethod
    def grad_into_rad(grads):
        # Degrees to radians (the 180 divisor implies degrees, not gradians)
        return (grads * math.pi) / 180

    def my_acos_newton(self, value):
        # Newton's method for cos(x) = value; renamed so it is not shadowed
        # by the series-based my_acos defined below.
        delta = 1 * 10 ** (-5)

        x = math.pi * (1 - value) / 2  # initial guess
        last = x
        # Newton step for f(x) = cos(x) - value: x += (cos(x) - value) / sin(x)
        x += (self.my_cos(x) - value) / self.my_sin(x)
        while abs(x - last) > delta:
            last = x
            x += (self.my_cos(x) - value) / self.my_sin(x)

        return x

    def my_asin(self, value):
        # asin(x) = sum over i of (2i)! / (4^i * (i!)^2 * (2i+1)) * x^(2i+1)
        approx_value = 0
        for i in range(5):
            coef = value ** (2 * i + 1)
            f_up = math.factorial(2 * i)
            f1_lw = 4 ** i
            f2_lw = math.factorial(i) ** 2
            f3_lw = 2 * i + 1

            approx_value += coef * (f_up / (f1_lw * f2_lw * f3_lw))

        return approx_value

    def my_acos(self, value):
        # acos(x) = pi/2 - asin(x), reusing the same series as my_asin
        approx_value = 0
        for i in range(5):
            coef = value ** (2 * i + 1)
            f_up = math.factorial(2 * i)
            f1_lw = 4 ** i
            f2_lw = math.factorial(i) ** 2
            f3_lw = 2 * i + 1

            approx_value += coef * (f_up / (f1_lw * f2_lw * f3_lw))

        return math.pi / 2 - approx_value

    def my_atan(self, value):
        # atan(x) = x - x^3/3 + x^5/5 - ... (first 4 terms, valid for |x| <= 1)
        approx_value = 0
        for i in range(1, 5):
            coef = value ** (2 * i - 1)
            f_up = (-1) ** (i - 1)
            f_lw = 2 * i - 1

            approx_value += (f_up / f_lw) * coef

        return approx_value


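# Illustrative sanity check (an addition, not in the original paste): compare
# the Taylor approximations above against the math module. `calc` and `angle`
# are hypothetical names introduced here for the demo.
calc = AdvancedCalculus()
angle = AdvancedCalculus.grad_into_rad(30)  # 30 degrees
print(f"my_sin:  {calc.my_sin(angle):.10f} vs math.sin:  {math.sin(angle):.10f}")
print(f"my_cos:  {calc.my_cos(angle):.10f} vs math.cos:  {math.cos(angle):.10f}")
print(f"my_atan: {calc.my_atan(0.5):.6f} vs math.atan: {math.atan(0.5):.6f}")
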
# Task 2: a binary tree as nested lists: [value, left_subtree, right_subtree]

tree = ['a', ['b', ['d', [], []], ['e', [], []]], ['c', ['f', [], []], []]]

print("Left Subtree: {0}".format(tree[1]),
      "Right Subtree: {0}".format(tree[2]))

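# Illustrative helper (hypothetical, added for clarity): a recursive preorder
# walk over the nested-list representation above, where [] marks an empty
# subtree.
def preorder(node):
    if not node:  # empty list -> empty subtree
        return []
    value, left, right = node
    return [value] + preorder(left) + preorder(right)

print("Preorder:", preorder(tree))  # ['a', 'b', 'd', 'e', 'c', 'f']
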
# Task 3: a binary search tree with a custom duplicate-value exception
# (the original note here read "unfinished"; the code below runs as-is)

class Error(Exception):
    pass


class ExistingValue(Error):
    pass


class Tree:
    def __init__(self, data):
        self.left = None
        self.right = None
        self.data = data

    def insert(self, data):
        # Standard BST insertion; duplicates raise ExistingValue
        if self.data is None:
            self.data = data
            return
        if data < self.data:
            if self.left is None:
                self.left = Tree(data)
            else:
                self.left.insert(data)
        elif data > self.data:
            if self.right is None:
                self.right = Tree(data)
            else:
                self.right.insert(data)
        else:
            raise ExistingValue(f"Tree with data:{data} already exists")

    def print_tree(self, dir='root', level=0):
        # Preorder dump of every node with its depth and children
        print(f"[{dir}] #{level}-{self} | left-{self.left} | right-{self.right}")
        if self.left is not None:
            self.left.print_tree(dir='left', level=level + 1)
        if self.right is not None:
            self.right.print_tree(dir='right', level=level + 1)

    def __str__(self):
        return f"Tree({self.data})"


tree = Tree(40)
tree.insert(11)
tree.insert(2)
tree.insert(5)
tree.insert(3)
tree.insert(18)
tree.insert(17)
tree.insert(1)
tree.insert(13)
tree.print_tree()

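# Illustrative extra (not in the original): inserting a value that is already
# present raises the custom ExistingValue exception defined above.
try:
    tree.insert(13)
except ExistingValue as err:
    print("Caught:", err)
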
# Task 4: decision-tree classification on a toy 2-feature dataset

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import classification_report, confusion_matrix
from sklearn import tree  # NB: this rebinds the name `tree` used in Task 3

x = np.array([[-1, -1],
              [-2, -1],
              [-3, -2],
              [1, 1],
              [2, 1],
              [3, 2]])
target = [0, 0, 0, 1, 1, 1]  # class labels for the six points above

dataset = pd.DataFrame(data=x)
dataset['target'] = target

x_train, x_test, y_train, y_test = train_test_split(
    dataset.iloc[:, :-1],  # the two feature columns
    dataset.iloc[:, -1],   # the label column
    test_size=0.20
)

classifier = DecisionTreeClassifier()
classifier.fit(x_train, y_train)

tree.plot_tree(classifier)
plt.show()

y_pred = classifier.predict(x_test)

print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))

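# Optional extra (an assumption, not in the original paste): sklearn's
# export_text dumps the fitted tree as plain text, which is easier to read
# than the plot for a dataset this small.
from sklearn.tree import export_text
print(export_text(classifier))
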
# Task 5: decision-tree regression on the red-wine-quality dataset

import pandas as pd
import numpy as np

import matplotlib.pyplot as plt
from sklearn.tree import DecisionTreeRegressor
from sklearn.model_selection import train_test_split
from sklearn import tree
from sklearn import metrics

url = r'https://raw.githubusercontent.com/aniruddhachoudhury/Red-Wine-Quality/master/winequality-red.csv'

dataset = pd.read_csv(url)
print(dataset.head())

print(dataset.shape)
print(dataset.describe())

plt.scatter(dataset['pH'], dataset['quality'], color='b')
plt.xlabel("pH")
plt.ylabel("Quality")
plt.show()

# Features: all columns except the last; target: the final 'quality' column
x = dataset.iloc[:, :-1].values
y = dataset.iloc[:, -1].values
print(x, y, sep='\n')

x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size=0.2, random_state=0
)

regressor = DecisionTreeRegressor()
regressor.fit(x_train, y_train)

tree.plot_tree(regressor)
plt.show()

y_pred = regressor.predict(x_test)
print(y_pred)

df = pd.DataFrame({"Actual": y_test, "Predicted": y_pred})
print(df)

print("Mean Squared Error: ", metrics.mean_squared_error(y_test, y_pred))
print("Mean Absolute Error: ", metrics.mean_absolute_error(y_test, y_pred))
# MAE as a percentage of the mean quality score
print(metrics.mean_absolute_error(y_test, y_pred) / np.average(y) * 100)

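# Hedged extra (not in the original): compare against a trivial
# predict-the-mean baseline to judge whether the tree adds value.
from sklearn.dummy import DummyRegressor

baseline = DummyRegressor(strategy="mean")
baseline.fit(x_train, y_train)
print("Baseline MAE:", metrics.mean_absolute_error(y_test, baseline.predict(x_test)))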