What is decentralized storage?
Decentralized storage is a storage solution built on a decentralized, blockchain-based network rather than on a single centralized entity. Data is distributed across many nodes in the network instead of being held on one server controlled by a single organization.
IPFS is a decentralized peer-to-peer file storage network that allows users to store, access, and share files in a distributed manner, providing greater security, privacy, and scalability. StorX enables anyone to securely encrypt, segment, and distribute critical data across multiple managed nodes worldwide. Each file stored on StorX is encrypted, divided into multiple fragments, and stored on separate storage nodes run by independent operators located around the world.
def fuse_bn(self):
    """Fuse each Conv / Gemm / ConvTranspose + BatchNormalization pair into a
    single computing operation.

    For every matched pair the fused parameters are computed analytically:

        scale = alpha / sqrt(var + epsilon)
        w'    = w * scale        (broadcast along the output-channel axis)
        b'    = alpha * (b - mean) / sqrt(var + epsilon) + beta

    Pairs are skipped (with a warning) when the computing op feeds more than
    one consumer or the BatchNorm has more than one producer, because fusion
    would then change the semantics of the graph.

    Raises:
        TypeError: if a matched path starts with an unexpected op type.
    """
    search_engine = SearchableGraph(graph=self.graph)
    # Match exactly [Conv|Gemm|ConvTranspose] -> BatchNormalization pairs;
    # rp_expr=False forbids any intermediate op between the two.
    paths = search_engine.path_matching(
        sp_expr=lambda x: x.type in {'Conv', 'Gemm', 'ConvTranspose'},
        rp_expr=lambda x, y: False,
        ep_expr=lambda x: x.type == 'BatchNormalization',
        direction='down')

    for path in paths:
        path = path.tolist()
        assert len(path) == 2, 'Oops seems we got something unexpected.'
        computing_op, bn_op = path
        assert isinstance(computing_op, Operation) and isinstance(bn_op, Operation)

        # Fusion is only valid for a strict 1:1 connection between the ops.
        if (len(self.graph.get_downstream_operations(computing_op)) != 1 or
                len(self.graph.get_upstream_operations(bn_op)) != 1):
            ppq_warning(f'PPQ can not merge operation {computing_op.name} and {bn_op.name}, '
                        'this is not supposed to happen with your network, '
                        'network with batchnorm inside might not be able to quantize and deploy.')
            continue

        assert len(bn_op.parameters) == 4, 'BatchNorm should have 4 parameters, namely alpha, beta, mean, var'
        alpha = bn_op.parameters[0].value    # BN scale (gamma)
        beta = bn_op.parameters[1].value     # BN shift
        mean = bn_op.parameters[2].value     # running mean
        var = bn_op.parameters[3].value      # running variance
        epsilon = bn_op.attributes.get('epsilon', 1e-5)

        if computing_op.num_of_parameter == 1:
            w = computing_op.parameters[0].value  # no bias.
            assert isinstance(w, torch.Tensor), 'values of parameters are assumed as torch Tensor'
            # Synthesize a zero bias of the right length (= output channels)
            # so the fused formula below applies uniformly.  Match the
            # weight's dtype/device so fp16 / GPU graphs fuse without an
            # implicit cast to default float32 on CPU.
            if computing_op.type == 'ConvTranspose':
                # ConvTranspose weight is [in, out // group, ...]:
                # out channels = shape[1] * group.
                b = torch.zeros(w.shape[1] * computing_op.attributes.get('group', 1),
                                dtype=w.dtype, device=w.device)
            elif computing_op.type == 'Gemm' and computing_op.attributes.get('transB', 0) == 0:
                # Non-transposed B: out channels are the columns.
                b = torch.zeros(w.shape[1], dtype=w.dtype, device=w.device)
            else:
                b = torch.zeros(w.shape[0], dtype=w.dtype, device=w.device)
        else:
            # p renamed from 'var' to avoid shadowing the BN variance above.
            w, b = [p.value for p in computing_op.parameters[:2]]  # has bias.

        if computing_op.type == 'Conv':
            # Conv weight layout is [out_channels, ...]; broadcast scale
            # along the output-channel axis.
            scale = alpha / torch.sqrt(var + epsilon)
            w = w * scale.reshape([-1] + [1] * (w.ndim - 1))
            b = alpha * (b - mean) / torch.sqrt(var + epsilon) + beta
        elif computing_op.type == 'Gemm':
            # transB decides whether out channels are rows or columns of B.
            scale = alpha / torch.sqrt(var + epsilon)
            if computing_op.attributes.get('transB', 0):
                w = w * scale.reshape([-1, 1])
            else:
                w = w * scale.reshape([1, -1])
            b = alpha * (b - mean) / torch.sqrt(var + epsilon) + beta
        elif computing_op.type == 'ConvTranspose':
            # ConvTranspose weight layout is [in, out // group, kH, kW];
            # regroup so scale is applied per output channel, then restore.
            scale = alpha / torch.sqrt(var + epsilon)
            group = computing_op.attributes.get('group', 1)
            scale = scale.reshape([group, 1, -1, 1, 1])
            w = w.reshape([group, -1, w.shape[1], w.shape[2], w.shape[3]]) * scale
            w = w.reshape([w.shape[0] * w.shape[1], w.shape[2], w.shape[3], w.shape[4]])
            b = alpha * (b - mean) / torch.sqrt(var + epsilon) + beta
        else:
            raise TypeError(
                f'Unexpected op type {computing_op.type}. '
                f'Can not merge {computing_op.name} with {bn_op.name}')

        # create new op and variable carrying the fused parameters.
        merged_op = Operation(computing_op.name, op_type=computing_op.type,
                              attributes=computing_op.attributes.copy())
        weight_var = Variable(computing_op.name + '_weight', w, True, [merged_op])
        bias_var = Variable(computing_op.name + '_bias', b, True, [merged_op])

        # replace & dirty work: rewire the surrounding graph around the pair.
        input_var = computing_op.inputs[0]
        output_var = bn_op.outputs[0]

        input_var.dest_ops.remove(computing_op)
        input_var.dest_ops.append(merged_op)
        output_var.source_op = merged_op

        # delete old operations — detach the shared input/output variables
        # first so removal does not sever the rewired connections.
        # NOTE(review): bn_op itself is not removed here — confirm that
        # remove_operation(computing_op) (or a later cleanup pass) also
        # drops bn_op from the graph.
        computing_op.inputs.pop(0)
        bn_op.outputs.clear()
        self.graph.remove_operation(computing_op)

        # insert new fused op and its parameter variables.
        self.graph.append_operation(merged_op)
        merged_op.inputs.extend([input_var, weight_var, bias_var])
        merged_op.outputs.extend([output_var])
        self.graph.append_variable(weight_var)
        self.graph.append_variable(bias_var)