Skip to content
Permalink
Browse files
Final Commit
  • Loading branch information
shyamalans committed Apr 18, 2023
1 parent a1d67b5 commit 5646bbf71f290d7773e203f47558e012c56e13f7
Show file tree
Hide file tree
Showing 24 changed files with 8,717 additions and 0 deletions.
BIN +2.81 MB NewLoop.wav
Binary file not shown.
BIN +1.06 MB NoisyDrumLoop.wav
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN +216 KB aug.wav
Binary file not shown.

Large diffs are not rendered by default.

80 cnn.py
@@ -0,0 +1,80 @@
from torch import nn
from torchsummary import summary


class CNNNetwork(nn.Module):
    """Four-block convolutional classifier producing 4-class probabilities.

    Architecture (per block): Conv2d(3x3, stride 1, padding 2) -> ReLU ->
    MaxPool2d(2) -> BatchNorm2d, with Dropout appended on blocks 2-4.
    The linear head assumes the conv stack flattens to 128 * 5 * 4
    features, which corresponds to a single-channel input of shape
    (1, 64, 44) — presumably a mel-spectrogram given the .wav assets in
    this commit; TODO confirm with the dataset code.
    """

    def __init__(self, dropoutProb=0.0):
        """Build the network.

        Args:
            dropoutProb: dropout probability applied after conv blocks
                2-4. Defaults to 0.0 so ``CNNNetwork()`` is valid (the
                original signature had no default, which made the
                module-level demo call raise ``TypeError``).
        """
        super().__init__()
        # Module order inside each Sequential matches the original
        # hand-written blocks, so state_dict keys are unchanged.
        self.conv1 = self._conv_block(1, 16)                 # no dropout on block 1
        self.conv2 = self._conv_block(16, 32, dropoutProb)
        self.conv3 = self._conv_block(32, 64, dropoutProb)
        self.conv4 = self._conv_block(64, 128, dropoutProb)
        self.flatten = nn.Flatten()
        self.linear = nn.Linear(128 * 5 * 4, 4)
        # NOTE(review): Softmax in forward() means the model emits
        # probabilities, not logits. If training uses CrossEntropyLoss
        # this double-applies softmax — confirm against the trainer.
        self.softmax = nn.Softmax(dim=1)

    @staticmethod
    def _conv_block(in_channels, out_channels, dropout_prob=None):
        """One Conv -> ReLU -> MaxPool -> BatchNorm (-> Dropout) stage."""
        layers = [
            nn.Conv2d(
                in_channels=in_channels,
                out_channels=out_channels,
                kernel_size=3,
                stride=1,
                padding=2,
            ),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
            nn.BatchNorm2d(out_channels),
        ]
        if dropout_prob is not None:
            layers.append(nn.Dropout(dropout_prob))
        return nn.Sequential(*layers)

    def forward(self, input_data):
        """Run the conv stack and return per-class probabilities.

        Args:
            input_data: tensor of shape (batch, 1, 64, 44).

        Returns:
            Tensor of shape (batch, 4); rows sum to 1 (softmax output).
        """
        x = self.conv1(input_data)
        x = self.conv2(x)
        x = self.conv3(x)
        x = self.conv4(x)
        x = self.flatten(x)
        logits = self.linear(x)
        predictions = self.softmax(logits)
        return predictions


if __name__ == "__main__":
    # Architecture sanity check: print a per-layer summary for a
    # (1, 64, 44) single-channel input. Requires a CUDA-capable GPU.
    # BUG FIX: __init__ requires dropoutProb, so the original no-arg
    # call CNNNetwork() raised TypeError; pass a value explicitly.
    cnn = CNNNetwork(dropoutProb=0.2)
    summary(cnn.cuda(), (1, 64, 44))

BIN +720 KB denoised.wav
Binary file not shown.

0 comments on commit 5646bbf

Please sign in to comment.