其他
【强基固本】100行用Python实现自动求导(不import任何包的情况下)
“强基固本,行稳致远”,科学研究离不开理论基础,人工智能学科更是需要数学、物理、神经科学等基础学科提供有力支撑,为了紧扣时代脉搏,我们推出“强基固本”专栏,讲解AI领域的基础知识,为你的科研学习提供助力,夯实理论基础,提升原始创新能力,敬请关注。
地址:https://zhuanlan.zhihu.com/p/438685307
# 为了不import,我甚至连product都是自己写的=_=
def product(items):
    """Multiply all elements of *items* together; returns 1 for an empty iterable."""
    result = 1
    for factor in items:
        result *= factor
    return result
# 继承关系:
# Node <- Constant, Variable, Operator
# Operator <- Add, Multiply, Divide, Pow, ...
# 所有的Operator都有子节点,所有的Constant和Variable都没有子节点
class Node:
    """Base class for every node of the expression graph.

    Every node carries a ``name`` (used for equality and printing) and a
    numeric ``value`` (meaningful for leaves; operators recompute values
    on demand via ``compute_value``).
    """

    def __init__(self, name, value=0):
        self.name = name
        self.value = value

    def __eq__(self, other):
        # Nodes compare by name only. Returning NotImplemented for
        # non-Node operands lets Python fall back to the other side's
        # __eq__ instead of raising AttributeError on a missing .name.
        if not isinstance(other, Node):
            return NotImplemented
        return self.name == other.name

    def __hash__(self):
        # Defining __eq__ alone would set __hash__ to None in Python 3,
        # making nodes unusable as dict keys / set members. Hash by name,
        # consistent with the name-based equality above.
        return hash(self.name)

    def __str__(self):
        return str(self.name)

    def __repr__(self):
        return self.__str__()
class Constant(Node):
    """Leaf node holding a fixed number; the name is the value itself."""

    def __init__(self, value):
        super().__init__(name=value, value=value)

    def compute_value(self):
        # A constant evaluates to itself.
        return self.value

    def compute_derivative(self, to_variable):
        # d(c)/dx == 0 for every variable x.
        return 0
class Variable(Node):
    """Leaf node for a named variable; assign ``.value`` before evaluating."""

    def compute_value(self):
        return self.value

    def compute_derivative(self, to_variable):
        # d(x)/dx == 1 and d(x)/dy == 0; variables are told apart by name.
        return 1 if to_variable.name == self.name else 0
class Operator(Node):
    """Base class for nodes with children; subclasses supply value/derivative."""

    def __init__(self, inputs, name):
        # NOTE: deliberately skips super().__init__; the operator kind is
        # embedded in the name string, and __str__ parses it back out.
        self.inputs = inputs
        self.name = f"Opt {name} of {inputs}"

    def __str__(self):
        symbols = {"Add": "+", "Power": "^", "Multiply": "*", "Divide": "/"}
        kind = self.name.split(" ")[1]
        rendered = [str(child) for child in self.inputs]
        return "(" + symbols[kind].join(rendered) + ")"
class Add(Operator):
    """Sum of an arbitrary number of child nodes."""

    def __init__(self, inputs):
        super().__init__(inputs, name="Add")

    def compute_value(self):
        # The value of a sum is the sum of the children's values.
        total = 0
        for child in self.inputs:
            total += child.compute_value()
        return total

    def compute_derivative(self, to_variable):
        # Differentiation distributes over addition.
        total = 0
        for child in self.inputs:
            total += child.compute_derivative(to_variable)
        return total
class Multiply(Operator):
    """Product of an arbitrary number of child nodes."""

    def __init__(self, inputs):
        super().__init__(inputs, name="Multiply")

    def compute_value(self):
        return product(inp.compute_value() for inp in self.inputs)

    def compute_derivative(self, to_variable):
        """Generalized product rule: sum of each factor's derivative times
        the product of all *other* factors.

        Factors are distinguished by position, not by equality: the
        original ``other_inp != inp`` test used name-based ``__eq__`` and
        therefore excluded *every* same-named factor, so e.g.
        ``Multiply([x, x])`` differentiated to 2 instead of 2*x.
        """
        total = 0
        for i, inp in enumerate(self.inputs):
            cofactor = product(
                other.compute_value()
                for j, other in enumerate(self.inputs)
                if j != i
            )
            total += inp.compute_derivative(to_variable) * cofactor
        return total
class Divide(Operator):
    """Quotient of exactly two child nodes: inputs[0] / inputs[1]."""

    def __init__(self, inputs):
        super().__init__(inputs, name="Divide")

    def compute_value(self):
        numerator = self.inputs[0].compute_value()
        denominator = self.inputs[1].compute_value()
        return numerator / denominator

    def compute_derivative(self, to_variable):
        # Quotient rule: (a/b)' = (a'*b - b'*a) / b^2.
        a = self.inputs[0].compute_value()
        b = self.inputs[1].compute_value()
        da = self.inputs[0].compute_derivative(to_variable)
        db = self.inputs[1].compute_derivative(to_variable)
        return (da * b - db * a) / (b ** 2)
class Power(Operator):
    """x ** n where the exponent (inputs[1]) is expected to be a Constant."""

    def __init__(self, inputs):
        super().__init__(inputs, name="Power")

    def compute_value(self):
        base, exponent = self.inputs
        # Reads .value directly — the exponent must be a constant node.
        return base.compute_value() ** exponent.value

    def compute_derivative(self, to_variable):
        # Constant-power rule: d(x^n)/dv = n * x^(n-1) * dx/dv.
        base, exponent = self.inputs
        n = exponent.value
        return n * base.compute_value() ** (n - 1) * base.compute_derivative(to_variable)
if __name__ == "__main__":
    # Smoke test: d(x + 5)/dx == 1, so this prints 1.
    # The original line had two bugs: "Varaible" was a typo (NameError)
    # and compute_derivative() was called without its target variable.
    x = Variable(name="x")
    print(Add([x, Constant(5)]).compute_derivative(x))
def wrapper_opt(opt, self, other, r=False):
    """Build the Operator node for ``self <opt> other``.

    Non-Node operands are wrapped in a Constant. *r* swaps the operand
    order, which implements the reflected (``__r*__``) dunder methods.
    """
    opt2class = {"add": Add, "mul": Multiply, "pow": Power, "div": Divide}
    if not isinstance(other, Node):
        other = Constant(other)
    operands = [self, other] if not r else [other, self]
    return opt2class[opt](inputs=operands)
# Graft arithmetic operators onto Node so expressions can be written with
# ordinary Python syntax (x + 1, 3 * x, x ** 2, 6 / x, ...).
Node.__add__ = lambda self, other: wrapper_opt("add", self, other)
Node.__mul__ = lambda self, other: wrapper_opt("mul", self, other)
Node.__truediv__ = lambda self, other: wrapper_opt("div", self, other)
Node.__pow__ = lambda self, other: wrapper_opt("pow", self, other)
# a - b is encoded as a + (-1) * b.
Node.__sub__ = lambda self, other: wrapper_opt(
    "add", self, wrapper_opt("mul", Constant(-1), other)
)
Node.__radd__ = lambda self, other: wrapper_opt("add", self, other, r=True)
Node.__rmul__ = lambda self, other: wrapper_opt("mul", self, other, r=True)
Node.__rtruediv__ = lambda self, other: wrapper_opt("div", self, other, r=True)
# Added for consistency with the other reflected operators: without
# __rsub__, `number - node` raised TypeError. other - self == (-1)*self + other.
# (No __rpow__ on purpose: Power only supports a Constant exponent, so
# `number ** node` is not representable in this graph.)
Node.__rsub__ = lambda self, other: wrapper_opt(
    "add", wrapper_opt("mul", Constant(-1), self), other
)
if __name__ == "__main__":
    # Demo: build f(x, y) = 3x^2 + 5xy + 6/x - 8y^2 + 10 symbolically,
    # then evaluate f and df/dx at (x, y) = (18, 2).
    x = Variable(name="x")
    y = Variable(name="y")
    function = 3 * (x ** 2) + 5 * x * y + 6 / x - 8 * y ** 2 + 10
    x.value, y.value = 18, 2
    print(function.compute_value())        # numeric value of f
    print(function.compute_derivative(x))  # numeric value of df/dx
    print(function)                        # fully parenthesized expression
1130.3333333333333
117.98148148148148
(((((3*(x^2))+((5*x)*y))+(6/x))+(-1*(8*(y^2))))+10)
“强基固本”历史文章
从三角函数变换到图神经网络
点云局部特征描述子 SHOT
直观理解万能近似定理(Universal Approximation theorem)
Kmeans 聚类算法
聚类性能评估-V-Measure
完全图解RNN、RNN变体、Seq2Seq、Attention机制
脉冲神经网络(Spiking Neural Network)介绍
流形学习概述
神经网络15分钟入门!——反向传播到底是怎么传播的?
图神经网络的理论基础
深度学习入门与Pytorch|4.2 变分自编码器(VAE)介绍、推导及代码
深度学习入门与Pytorch|4.1 深度学习中的几种自编码器的介绍与Pytorch代码实现
白化变换:PCA白化、ZCA白化
常用卷积神经网络巡礼(论文详解+代码实现)
更多强基固本专栏文章,
请点击文章底部“阅读原文”查看
分享、点赞、在看,给个三连击呗!