Skip to content
Snippets Groups Projects
Commit 1c122b99 authored by Mahyar Vahabi's avatar Mahyar Vahabi
Browse files

I'm losing brain cells

parent fb11498f
No related branches found
No related tags found
No related merge requests found
#include <iostream>
#include <unordered_map>
#include <vector>
#include <random>
#include <fstream>
#include <sstream>
#include <algorithm>
#include <chrono>
using namespace std;
class ORAM {
private:
unordered_map<int, string> storage;
vector<int> accessHistory;
const string filename = "oram_data.bin";
void load_from_file() {
ifstream file(filename, ios::binary);
if (file.is_open()) {
int key;
string value;
while (file >> key) {
file.ignore();
getline(file, value);
storage[key] = value;
}
file.close();
}
}
void save_to_file() {
ofstream file(filename, ios::binary | ios::trunc);
if (file.is_open()) {
for (const auto& pair : storage) {
file << pair.first << " " << pair.second << "\n";
}
file.close();
}
}
void perform_dummy_reads() {
random_device rd;
mt19937 gen(rd());
uniform_int_distribution<int> dist(1, 100);
int num_reads = dist(gen) % 10 + 1; // Random dummy accesses
for (int i = 0; i < num_reads; i++) {
int dummy_key = dist(gen);
storage.find(dummy_key);
}
}
public:
ORAM() {
// Restore previously persisted key/value pairs so stored data
// survives across process runs.
load_from_file();
}
void store(int key, string value) {
/*
* ORAM Implementation for Securely Storing Queries & Model Parameters
* This version supports query storage and retrieval with obfuscated access patterns.
*
* Author: Lily Faris
*/
#include <iostream>
#include <unordered_map>
#include <vector>
#include <random>
#include <fstream>
#include <algorithm>
#include <chrono>
using namespace std;
class ORAM {
private:
unordered_map<int, string> storage; // Securely stores parameters & queries
vector<int> accessHistory; // Tracks access patterns
hash<string> str_hash;
const string filename = "oram_data.txt"; // Persistent storage file
void load_from_file() {
// Rebuilds `storage` from the persistence file. Record format is
// "<int key><space><rest of line is the value>"; missing file means
// we simply start with an empty store.
ifstream file(filename);
if (file.is_open()) {
int key;
string value;
while (file >> key) {
file.ignore(); // consume the separator between key and value
getline(file, value);
storage[key] = value;
}
file.close();
}
}
void save_to_file() {
    // Rewrite the whole persistence file from `storage`, one record per
    // line: "<key> <value>". NOTE(review): a value containing '\n' would
    // corrupt this line-oriented format — confirm values are single-line.
    ofstream file(filename, ios::trunc);
    if (file.is_open()) {
        for (const auto& pair : storage) {
            file << pair.first << " " << pair.second << "\n";
        }
        // FIX: removed a stray extra `file << "\n";` that appended a
        // blank line on every save (the sibling implementation writes
        // no trailing blank line).
        file.close();
    }
}
public:
ORAM() {
// Restore persisted key/value pairs so data survives restarts.
load_from_file(); // Load stored data at initialization
}
void store(int key, string value) {
    // Record the touched key, insert/overwrite the entry, then persist
    // the whole store to disk.
    accessHistory.push_back(key);
    storage[key] = value;
    save_to_file();
}
string retrieve(int key) {
    // Mask the real lookup behind a burst of decoy accesses first.
    perform_dummy_reads(); // Obfuscate access patterns
    auto it = storage.find(key);
    if (it == storage.end()) {
        return "Parameter not found";
    }
    return it->second;
}
// void shuffle() {
// random_device rd;
// mt19937 g(rd());
// shuffle(accessHistory.begin(), accessHistory.end(), g);
// }
size_t hashed_key = str_hash(to_string(key));
storage[hashed_key] = value;
accessHistory.push_back(hashed_key);
void shuffle() {
random_device rd;
mt19937 g(rd());
std::shuffle(accessHistory.begin(), accessHistory.end(), g);
save_to_file();
}
/*
void perform_dummy_reads() {
random_device rd;
mt19937 gen(rd());
uniform_int_distribution<int> dist(1, 100);
for (int i = 0; i < rand() % 5 + 1; i++) {
int dummy_key = dist(gen);
storage.find(str_hash(to_string(dummy_key))); // Fake access
}
}
void debug_display() {
cout << "Stored Parameters:\n";
for (const auto& pair : storage) {
cout << "Key: " << pair.first << " | Value: " << pair.second << endl;
*/
void log_access(int key) {
    // Append an audit record (key + unix timestamp) to the access log.
    // NOTE(review): logging the real key partially defeats the
    // access-pattern hiding — confirm this is for debugging only.
    ofstream log_file("oram_access_log.txt", ios::app);
    if (!log_file.is_open()) {
        return;
    }
    const auto stamp = chrono::system_clock::to_time_t(chrono::system_clock::now());
    log_file << "Accessed Key: " << key << " at " << stamp << "\n";
}
string retrieve(int key) {
    // Look up under the hashed key, mirroring the hashing applied when
    // the value was stored.
    const size_t hashed_key = str_hash(to_string(key));
    //perform_dummy_reads(); // Add noise — reportedly not working yet
    log_access(key);
    auto it = storage.find(hashed_key);
    return it != storage.end() ? it->second : "Data not found";
}
void shuffle() {
random_device rd;
mt19937 g(rd());
std::shuffle(accessHistory.begin(), accessHistory.end(), g);
}
void debug_display() {
    // Dump every stored pair to "shuffling.txt", overwriting the file
    // on each call.
    ofstream shuffle_file("shuffling.txt", ios::trunc);
    if (!shuffle_file.is_open()) {
        return;
    }
    shuffle_file << "Stored Parameters:\n";
    for (const auto& pair : storage) {
        shuffle_file << "Key: " << pair.first << " | Value: " << pair.second << endl;
    }
}
};
};
int main(int argc, char* argv[]) {
ORAM oram;
......@@ -103,7 +126,6 @@ int main(int argc, char* argv[]) {
if (command == "store" && argc == 4) {
int key = stoi(argv[2]);
oram.store(key, argv[3]);
cout << "Stored successfully!" << endl;
}
else if (command == "retrieve" && argc == 3) {
int key = stoi(argv[2]);
......@@ -112,13 +134,10 @@ int main(int argc, char* argv[]) {
else if (command == "shuffle") {
cout << "Shuffling access patterns..." << endl;
oram.shuffle();
cout << "Displaying stored data..." << endl;
oram.debug_display();
}
else {
cout << "Invalid command." << endl;
return 1;
}
return 0;
}
}
\ No newline at end of file
......@@ -7,59 +7,73 @@ import random
def compile_oram():
    """Compile the ORAM C++ helper into ./test if the executable is missing."""
    # FIX: the merged diff duplicated this existence check; keep one copy.
    if not os.path.exists("test"):  # skip recompiling when the binary exists
        print("Compiling ORAM C++ code...")
        subprocess.run(["g++", "test.cpp", "-o", "test"], check=True)
        print("Compilation complete.")
def store(key, tensor):
    """Store a Python object (e.g. a PyTorch tensor) via the ORAM binary.

    The value is pickled and hex-encoded so it survives argv transport.
    """
    tensor_bytes = pickle.dumps(tensor)  # serialize to bytes
    subprocess.run(["./test", "store", str(key), tensor_bytes.hex()], check=True)


# Backward-compatible alias for callers still using the old name.
store_parameter = store
def retrieve(key):
    """Retrieve and deserialize a value previously stored via `store`.

    SECURITY NOTE(review): `pickle.loads` on bytes that round-trip through
    an external binary/file is unsafe if that storage can be tampered
    with — consider a safe serialization format.
    """
    result = subprocess.run(["./test", "retrieve", str(key)],
                            capture_output=True, text=True, check=True)
    tensor_bytes = bytes.fromhex(result.stdout.strip())
    return pickle.loads(tensor_bytes)


# Backward-compatible alias for callers still using the old name.
retrieve_parameter = retrieve
def shuffle():
    """Ask the ORAM executable to shuffle its recorded access patterns."""
    subprocess.run(["./test", "shuffle"], check=True)


# Backward-compatible alias for callers still using the old name.
perform_shuffle = shuffle
def generate_response(query, api_key):
    """Generate a response for `query` using GPT-4o-mini via LangChain.

    Returns the model's response object (caller reads `.content`).
    """
    chat_model = ChatOpenAI(model="gpt-4o-mini", openai_api_key=api_key)
    # FIX: the merged diff duplicated this `messages` assignment; keep one.
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": query}
    ]
    response = chat_model.invoke(messages)
    return response
def main():
    """Demo driver: store a user query plus simulated model parameters in
    ORAM, shuffle access patterns, retrieve everything, and answer the query.
    """
    # SECURITY FIX: the original hard-coded an OpenAI API key here; that key
    # is leaked in version control and must be revoked. Read it from the
    # environment instead.
    api_key = os.environ.get("OPENAI_API_KEY", "")
    compile_oram()  # ensure the ORAM binary is built

    # Simulated LLM parameters (PyTorch tensors).
    parameters = {
        1: torch.randn(4, 4),  # simulated weight matrix
        2: torch.randn(4),     # simulated bias vector
        3: torch.randn(4, 4),
        4: torch.randn(4),
    }

    # Store the user's query under a random key outside the parameter range.
    user_query = input("Enter Prompt here: ")
    query_key = random.randint(len(parameters), 10000)

    print("Storing initial model parameters...")
    store(query_key, user_query)
    for key, tensor in parameters.items():
        store(key, tensor)

    shuffle()  # obfuscate ORAM access patterns

    print("Retrieving stored parameters...")
    for key in parameters.keys():
        print(f"Retrieving key {key}:", retrieve(key))

    retrieved_query = retrieve(query_key)
    print(f"Retrieved Query: {retrieved_query}")
    response = generate_response(retrieved_query, api_key)
    print("AI Response:", response.content)
if __name__ == "__main__":
    # FIX: the merged diff duplicated the `main()` call; run it once.
    main()
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment