Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
tensorflow>=2.10.0
keras>=2.10.0
numpy>=1.21.0
pandas>=1.3.0
matplotlib>=3.5.0
pillow>=9.0.0
scikit-learn>=1.0.0
tqdm>=4.62.0
imageio>=2.19.0
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import os
import torch
import torch.nn as nn
import torch.optim as optim
Expand All @@ -12,7 +13,10 @@
# Set device: prefer the GPU when one is available, fall back to CPU so the
# script also runs on machines without CUDA (the hardcoded "cuda" crashed there).
device = "cuda" if torch.cuda.is_available() else "cpu"
learning_method = "contrastive_embedding"

# Resolve the checkpoint path relative to this script's directory so the code
# is portable across machines (no hardcoded absolute paths, no stale
# duplicate assignment).
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
saved_model_path = os.path.join(BASE_DIR, "output", "pretrained_contrastive_embedding.pth")

# Set hyperparameters
batch_size = 128
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import os
import torch
import torch.nn as nn
import torch.optim as optim
Expand All @@ -21,7 +22,16 @@
# Set device: prefer the GPU when one is available, fall back to CPU so the
# script also runs on machines without CUDA (the hardcoded "cuda" crashed there).
device = "cuda" if torch.cuda.is_available() else "cpu"
learning_method = "contrastive_embedding"

# Resolve the checkpoint path relative to this script's directory so the code
# is portable across machines (no hardcoded absolute paths, no stale
# duplicate assignment).
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
saved_model_path = os.path.join(
    BASE_DIR,
    "logger",
    "2023-07-23-13-30-24",
    "checkpoint",
    "Resnet_finetune_Model_II_2023-07-23-13-30-24.pt",
)

# Set hyperparameters
batch_size = 512
Expand Down
17 changes: 14 additions & 3 deletions Transformers_Classification_DeepLense_Kartik_Sachdev/inference.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from __future__ import print_function

import os
from turtle import down

from utils.dataset import DefaultDatasetSetupSSL
Expand All @@ -19,8 +19,19 @@ def main():
labels_map = {0: "axion", 1: "cdm", 2: "no_sub"}
image_size = 224
channels = 1
log_dir = "/home/kartik/git/DeepLense/Transformers_Classification_DeepLense_Kartik_Sachdev/logger/2023-07-23-13-30-24"
finetune_model_path = "/home/kartik/git/DeepLense/Transformers_Classification_DeepLense_Kartik_Sachdev/logger/2023-07-23-13-30-24/checkpoint/Resnet_finetune_Model_II.pt"

# Define base directory relative to script location
BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# Use relative paths instead of hardcoded absolute paths
log_dir = os.path.join(BASE_DIR, "logger", "2023-07-23-13-30-24")
finetune_model_path = os.path.join(
BASE_DIR,
"logger",
"2023-07-23-13-30-24",
"checkpoint",
"Resnet_finetune_Model_II.pt"
)
batch_size = 512
num_workers = 8

Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import os
import torch
import torch.nn as nn
import torch.optim as optim
Expand Down Expand Up @@ -106,8 +107,31 @@ def get_transfomer_finetuned(saved_model_path, device, num_classes) -> nn.Module
def main():
# Set device
device = "cuda" # torch.device("cuda" if torch.cuda.is_available() else "cpu")
pretrained_model_path = "/home/kartik/git/DeepLense/Transformers_Classification_DeepLense_Kartik_Sachdev/logger/2023-07-25-06-27-13/checkpoint/CrossFormer_pretrained_Model_II_2023-07-25-06-27-13.pt"
finetuned_model_path = "/home/kartik/git/DeepLense/Transformers_Classification_DeepLense_Kartik_Sachdev/logger/2023-07-25-06-27-13/checkpoint/CrossFormer_finetuned_Model_II_2023-07-25-06-27-13.pt"

# Define base directory relative to script location
BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# Use relative paths instead of hardcoded absolute paths
pretrained_model_path = os.path.join(
BASE_DIR,
"..",
"..",
"..",
"logger",
"2023-07-25-06-27-13",
"checkpoint",
"CrossFormer_pretrained_Model_II_2023-07-25-06-27-13.pt"
)
finetuned_model_path = os.path.join(
BASE_DIR,
"..",
"..",
"..",
"logger",
"2023-07-25-06-27-13",
"checkpoint",
"CrossFormer_finetuned_Model_II_2023-07-25-06-27-13.pt"
)

# Set hyperparameters
batch_size = 512
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from __future__ import print_function
import os

from turtle import down

Expand Down Expand Up @@ -107,8 +108,22 @@ def main():
labels_map = {0: "axion", 1: "cdm", 2: "no_sub"}
image_size = 224
channels = 1
log_dir = "/home/kartik/git/DeepLense/Transformers_Classification_DeepLense_Kartik_Sachdev/logger/2023-07-25-06-27-13"
finetune_model_path = "/home/kartik/git/DeepLense/Transformers_Classification_DeepLense_Kartik_Sachdev/logger/2023-07-25-06-27-13/checkpoint/CrossFormer_finetuned_Model_II_2023-07-25-06-27-13.pt"

# Define base directory relative to script location
BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# Use relative paths instead of hardcoded absolute paths
log_dir = os.path.join(BASE_DIR, "..", "..", "..", "logger", "2023-07-25-06-27-13")
finetune_model_path = os.path.join(
BASE_DIR,
"..",
"..",
"..",
"logger",
"2023-07-25-06-27-13",
"checkpoint",
"CrossFormer_finetuned_Model_II_2023-07-25-06-27-13.pt"
)
batch_size = 512
num_workers = 8

Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import os
import torch
import torch.nn as nn
import torch.optim as optim
Expand All @@ -21,8 +22,29 @@
# Set device: prefer the GPU when one is available, fall back to CPU so the
# script also runs on machines without CUDA (the hardcoded "cuda" crashed there).
device = "cuda" if torch.cuda.is_available() else "cpu"
learning_method = "contrastive_embedding"

# Resolve checkpoint paths relative to this script's directory so the code is
# portable across machines (no hardcoded absolute paths, no stale duplicate
# assignments). The "../../.." climbs from the script's folder toward the
# repository root where "logger/" lives —
# NOTE(review): confirm this relative depth matches the actual layout.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Both checkpoints live in the same directory; build it once.
_CHECKPOINT_DIR = os.path.join(
    BASE_DIR,
    "..",
    "..",
    "..",
    "logger",
    "2023-07-23-13-30-24",
    "checkpoint",
)
pretrained_model_path = os.path.join(
    _CHECKPOINT_DIR, "Resnet_finetune_Model_II_2023-07-23-13-30-24.pt"
)
finetuned_model_path = os.path.join(_CHECKPOINT_DIR, "Resnet_finetune_Model_II.pt")

# Set hyperparameters
batch_size = 512
Expand Down