
The Informer's Fate (1/2)

Once upon a time there was a mountain called 巴里赫德, and it got to be a whole mountain all by itself, which felt pretty great.

Heh, bet you didn't see that coming.

In this mountain there were woods, there was a temple, and there was the nimble dog of these hills. On the mountain stood a tree. This tree was not called the Tall Tree, because that joke has been done to death. The tree had a rather peculiar shape; it looked roughly like this.

import torch
from torch import nn
import torch.nn.functional as F
import os
import tensorboardX
from torch.utils.data import Dataset
from torch.utils.data import DataLoader

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

class VGGBaseSimpleS2(nn.Module):
    def __init__(self):
        super(VGGBaseSimpleS2, self).__init__()
        self.conv1 = nn.Sequential(
            nn.Conv2d(1, 12, kernel_size=3, stride=1, padding=1),
            # nn.BatchNorm2d(16),
            nn.ReLU()
        )
        # 6*6
        self.max_pooling1 = nn.MaxPool2d(kernel_size=2, stride=1)
        # 5*5
        self.conv2_1 = nn.Sequential(
            nn.Conv2d(12, 24, kernel_size=3, stride=1, padding=1),
            nn.ReLU()
        )
        # defined but never applied in forward(), so the map is still 5*5 at this point
        self.max_pooling2_1 = nn.MaxPool2d(kernel_size=2, stride=1)
        # 4*4
        self.conv2_2 = nn.Sequential(
            nn.Conv2d(24, 24, kernel_size=3, stride=1, padding=1),
            nn.ReLU()
        )
        self.max_pooling2 = nn.MaxPool2d(kernel_size=2, stride=2)
        # 2*2
        self.fc = nn.Linear(24 * 2 * 2, 2)

    def forward(self, x):
        batchsize = x.size(0)
        out = self.conv1(x)
        out = self.max_pooling1(out)
        out = self.conv2_1(out)
        out = self.conv2_2(out)
        out = self.max_pooling2(out)
        out = out.view(batchsize, -1)      # flatten to (N, 24*2*2)
        out = self.fc(out)
        out = F.log_softmax(out, dim=1)    # log-probabilities over the 2 classes
        return out
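
If you want to poke the tree and check its shape for yourself, a minimal probe would look like the lines below. This is only a sketch, assuming the single-channel 6*6 input that the size comments imply; the `probe` tensor is made up for illustration.

# hypothetical shape probe (not part of the original script):
# a batch of 6*6 single-channel inputs goes in, two log-probabilities per sample come out
probe = torch.zeros(4, 1, 6, 6)
print(VGGBaseSimpleS2()(probe).shape)   # expected: torch.Size([4, 2])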

class TrainingDataSet(Dataset):
    def __init__(self):
        super(TrainingDataSet, self).__init__()
        # X_train / y_train are assumed to be defined elsewhere (not shown in this excerpt)
        self.data_dict_X = X_train
        self.data_dict_y = y_train

    def __getitem__(self, index):
        t = self.data_dict_X[index, 0:36]     # first 36 features of the sample
        t = torch.tensor(t).view(6, 6)        # reshape to a 6*6 "image"
        return t, self.data_dict_y[index]

    def __len__(self):
        return len(self.data_dict_y)


class TestDataSet(Dataset):
    def __init__(self):
        super(TestDataSet, self).__init__()
        # X_validate / y_validate are assumed to be defined elsewhere (not shown in this excerpt)
        self.data_dict_X = X_validate
        self.data_dict_y = y_validate

    def __getitem__(self, index):
        t = self.data_dict_X[index, 0:36]
        t = torch.tensor(t).view(6, 6)
        return t, self.data_dict_y[index]

    def __len__(self):
        return len(self.data_dict_y)

def cnn_classification():
    batch_size = 256
    trainDataLoader = DataLoader(TrainingDataSet(), batch_size=batch_size, shuffle=False)
    testDataLoader = DataLoader(TestDataSet(), batch_size=batch_size, shuffle=False)

    epoch_num = 200
    # lr = 0.001
    lr = 0.001

    net = VGGBaseSimpleS2().to(device)
    print(net)

    # loss: forward() already applies log_softmax, so NLLLoss is the matching criterion
    loss_func = nn.NLLLoss()

    # optimizer
    optimizer = torch.optim.Adam(net.parameters(), lr=lr)
    # optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.9)

    if not os.path.exists("logCNN"):
        os.mkdir("logCNN")
    writer = tensorboardX.SummaryWriter("logCNN")

    for epoch in range(epoch_num):
        train_sum_loss = 0
        train_sum_correct = 0
        train_sum_fp = 0
        train_sum_fn = 0
        train_sum_tp = 0
        train_sum_tn = 0
        for i, data in enumerate(trainDataLoader):
            net.train()
            inputs, labels = data
            inputs = inputs.unsqueeze(1).to(torch.float32)   # add the channel dim: (N, 1, 6, 6)
            labels = labels.type(torch.LongTensor)
