2020-12-01 21:33:10 +01:00
|
|
|
use lazy_static::lazy_static;
|
2020-12-07 20:12:53 +01:00
|
|
|
use serde::{Deserialize, Serialize};
|
|
|
|
use serde_json::Result;
|
2020-09-29 01:04:28 +02:00
|
|
|
use sha1::{Digest, Sha1};
|
2020-12-07 20:12:53 +01:00
|
|
|
use std::collections::HashMap;
|
2020-09-27 14:52:54 +02:00
|
|
|
use std::fs;
|
2020-11-04 22:33:11 +01:00
|
|
|
use std::path::Path;
|
2020-09-29 01:04:28 +02:00
|
|
|
use std::str;
|
2020-12-01 21:33:10 +01:00
|
|
|
use std::sync::Mutex;
|
2020-11-07 14:25:11 +01:00
|
|
|
use walkdir::WalkDir;
|
2020-09-27 14:52:54 +02:00
|
|
|
|
2020-12-01 21:33:10 +01:00
|
|
|
/// Default name of the repository directory, relative to the current directory.
// `&'static` is redundant on a `static` item; the lifetime is implied.
static BASE_RGIT_DIR: &str = ".rgit";
|
|
|
|
|
|
|
|
lazy_static! {
    // Path of the rgit directory that all repository operations act on;
    // swapped by set_rgit_dir / reset_rgit_dir.
    static ref RGIT_DIR: Mutex<String> = Mutex::new(BASE_RGIT_DIR.to_owned());
    // Previous RGIT_DIR value saved by set_rgit_dir so reset_rgit_dir can
    // restore it; an empty string means "nothing saved".
    static ref OLD_DIR: Mutex<String> = Mutex::new("".to_owned());
}
|
2020-09-27 14:52:54 +02:00
|
|
|
|
2020-11-08 00:25:00 +01:00
|
|
|
/// The resolved value of a ref.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RefValue {
    /// Raw ref content: when `symbolic` is set, the name of the ref this
    /// one points to; otherwise the stored value itself (presumably an
    /// object id — confirm against callers of `update_ref`).
    pub value: String,
    /// True when the on-disk form is `ref: <target>`.
    pub symbolic: bool,
}
|
|
|
|
|
2020-12-07 20:12:53 +01:00
|
|
|
// On-disk JSON representation of the staging index (written by set_index,
// read by get_index).
#[derive(Serialize, Deserialize)]
struct Index {
    // NOTE(review): presumably maps file path -> object hash of the staged
    // content — confirm against the callers of set_index.
    files: HashMap<String, String>,
}
|
|
|
|
|
2020-12-01 21:33:10 +01:00
|
|
|
// The below two methods are not the same thing as a "context manager"
|
|
|
|
// I might need to replace it later with a better alternative.
|
|
|
|
pub fn set_rgit_dir(path: &str) {
|
|
|
|
let mut dir = RGIT_DIR.lock().unwrap();
|
|
|
|
let mut old_dir = OLD_DIR.lock().unwrap();
|
|
|
|
|
|
|
|
*old_dir = dir.to_string();
|
|
|
|
*dir = format!("{}/.rgit", path);
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn reset_rgit_dir() {
|
|
|
|
let mut dir = RGIT_DIR.lock().unwrap();
|
|
|
|
let mut old_dir = OLD_DIR.lock().unwrap();
|
|
|
|
if old_dir.to_string() == "" {
|
|
|
|
*dir = BASE_RGIT_DIR.to_string().clone();
|
|
|
|
} else {
|
|
|
|
*dir = old_dir.to_string();
|
|
|
|
*old_dir = "".to_string();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-09-27 14:52:54 +02:00
|
|
|
pub fn init() -> std::io::Result<()> {
|
2020-12-01 21:33:10 +01:00
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
|
|
|
fs::create_dir(dir.clone())?;
|
|
|
|
fs::create_dir(format!("{}/{}", dir, "objects"))?;
|
2020-09-27 14:52:54 +02:00
|
|
|
Ok(())
|
|
|
|
}
|
2020-09-29 01:04:28 +02:00
|
|
|
|
2020-10-05 13:58:48 +02:00
|
|
|
pub fn hash_object(content: &Vec<u8>, _type: String) -> String {
|
2020-12-01 21:33:10 +01:00
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
2020-10-05 13:58:48 +02:00
|
|
|
let mut raw = format!("{}\u{0}", _type).into_bytes();
|
|
|
|
let mut data = content.clone();
|
|
|
|
raw.append(&mut data);
|
|
|
|
|
2020-09-29 01:04:28 +02:00
|
|
|
let mut hasher = Sha1::new();
|
2020-10-05 13:58:48 +02:00
|
|
|
hasher.update(&raw);
|
2020-09-29 01:04:28 +02:00
|
|
|
let digest = &hasher.finalize();
|
|
|
|
let s = format!("{:x}", digest);
|
|
|
|
|
2020-12-01 21:33:10 +01:00
|
|
|
fs::write(format!("{}/{}/{}", dir, "objects", s), raw.as_slice())
|
2020-10-05 13:58:48 +02:00
|
|
|
.expect("Failed to write object");
|
2020-09-29 01:04:28 +02:00
|
|
|
|
|
|
|
return s;
|
|
|
|
}
|
2020-09-29 12:08:12 +02:00
|
|
|
|
2020-10-05 13:58:48 +02:00
|
|
|
pub fn get_object(hash: String, expected: String) -> String {
|
2020-12-01 21:33:10 +01:00
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
|
|
|
let mut content = fs::read_to_string(format!("{}/{}/{}", dir, "objects", hash))
|
2020-09-29 12:08:12 +02:00
|
|
|
.expect("Could not find a matching object");
|
2020-10-05 13:58:48 +02:00
|
|
|
|
|
|
|
let index = content.find(char::from(0)).expect("object type missing");
|
|
|
|
let data = content.split_off(index + 1);
|
|
|
|
|
|
|
|
if expected != "".to_owned() {
|
|
|
|
// Compare the type
|
2020-10-25 19:35:47 +01:00
|
|
|
content.pop();
|
2020-10-05 13:58:48 +02:00
|
|
|
assert_eq!(expected, content);
|
|
|
|
}
|
|
|
|
|
|
|
|
return data;
|
2020-09-29 12:08:12 +02:00
|
|
|
}
|
2020-11-01 23:38:22 +01:00
|
|
|
|
2020-11-08 14:09:17 +01:00
|
|
|
pub fn update_ref(mut reference: String, value: RefValue, deref: bool) {
|
2020-12-01 21:33:10 +01:00
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
2020-11-08 14:09:17 +01:00
|
|
|
reference = get_ref_internal(reference, deref).0;
|
2020-11-08 14:18:29 +01:00
|
|
|
let content: String;
|
|
|
|
|
|
|
|
assert!(value.value != "");
|
|
|
|
if value.symbolic {
|
|
|
|
content = format!("ref: {}", value.value);
|
|
|
|
} else {
|
|
|
|
content = value.value;
|
|
|
|
}
|
|
|
|
|
2020-12-01 21:33:10 +01:00
|
|
|
let path = format!("{}/{}", dir, reference);
|
2020-11-04 22:33:11 +01:00
|
|
|
let mut parents = Path::new(&path).ancestors();
|
|
|
|
parents.next();
|
|
|
|
|
|
|
|
let parent = parents.next().unwrap().to_str().unwrap();
|
|
|
|
fs::create_dir_all(parent).expect("Cannot create required dirs");
|
2020-11-08 14:18:29 +01:00
|
|
|
fs::write(path, content).expect("Failed to updated HEAD");
|
2020-11-01 23:38:22 +01:00
|
|
|
}
|
2020-11-02 00:01:24 +01:00
|
|
|
|
2020-11-08 14:09:17 +01:00
|
|
|
/// Resolve `reference` (following symbolic refs when `deref` is set)
/// and return its value.
pub fn get_ref(reference: String, deref: bool) -> RefValue {
    get_ref_internal(reference, deref).1
}
|
2020-11-07 14:25:11 +01:00
|
|
|
|
2020-11-28 22:48:38 +01:00
|
|
|
pub fn delete_ref(reference: String, deref: bool) {
|
2020-12-01 21:33:10 +01:00
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
2020-11-28 22:48:38 +01:00
|
|
|
let ref_to_del = get_ref_internal(reference, deref).0;
|
2020-12-01 21:33:10 +01:00
|
|
|
fs::remove_file(format!("{}/{}", dir, ref_to_del)).unwrap();
|
2020-11-28 22:48:38 +01:00
|
|
|
}
|
|
|
|
|
2020-11-14 14:19:15 +01:00
|
|
|
pub fn iter_refs(prefix: &str, deref: bool) -> Vec<(String, RefValue)> {
|
2020-12-01 21:33:10 +01:00
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
2020-11-08 00:25:00 +01:00
|
|
|
let mut refs: Vec<(String, RefValue)> = vec![];
|
2020-11-14 14:19:15 +01:00
|
|
|
|
2020-11-29 15:24:55 +01:00
|
|
|
refs.push(("HEAD".to_owned(), get_ref("HEAD".to_owned(), deref)));
|
|
|
|
refs.push((
|
|
|
|
"MERGE_HEAD".to_owned(),
|
|
|
|
get_ref("MERGE_HEAD".to_owned(), deref),
|
|
|
|
));
|
2020-11-07 14:25:11 +01:00
|
|
|
|
2020-12-01 21:33:10 +01:00
|
|
|
for entry in WalkDir::new(format!("{}/refs/", dir.clone())) {
|
2020-11-07 14:25:11 +01:00
|
|
|
let item = entry.unwrap();
|
|
|
|
let metadata = item.metadata().unwrap();
|
|
|
|
|
|
|
|
if metadata.is_file() {
|
2020-12-01 21:33:10 +01:00
|
|
|
let relative_path = item.path().strip_prefix(dir.clone()).unwrap();
|
2020-11-29 15:24:55 +01:00
|
|
|
refs.push((
|
|
|
|
relative_path.to_str().unwrap().to_owned(),
|
|
|
|
get_ref(relative_path.to_str().unwrap().to_owned(), deref),
|
|
|
|
));
|
2020-11-07 14:25:11 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-11-29 15:24:55 +01:00
|
|
|
let mut filtered_refs = vec![];
|
|
|
|
for reference in refs {
|
|
|
|
if reference.0.starts_with(prefix) && reference.1.value != "".to_owned() {
|
|
|
|
filtered_refs.push(reference);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return filtered_refs;
|
2020-11-07 14:25:11 +01:00
|
|
|
}
|
2020-11-08 13:49:39 +01:00
|
|
|
|
2020-11-08 14:09:17 +01:00
|
|
|
pub fn get_ref_internal(reference: String, deref: bool) -> (String, RefValue) {
|
2020-12-01 21:33:10 +01:00
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
|
|
|
let ref_path = format!("{}/{}", dir, reference);
|
2020-11-08 14:09:17 +01:00
|
|
|
let mut value = fs::read_to_string(ref_path).unwrap_or("".to_owned());
|
|
|
|
let symbolic = !value.is_empty() && value.starts_with("ref:");
|
2020-11-08 13:49:39 +01:00
|
|
|
|
2020-11-08 14:09:17 +01:00
|
|
|
if symbolic {
|
2020-11-14 14:19:15 +01:00
|
|
|
let new_ref: Vec<&str> = value.splitn(2, ": ").collect();
|
2020-11-08 14:09:17 +01:00
|
|
|
value = new_ref[1].to_owned();
|
|
|
|
if deref {
|
|
|
|
return get_ref_internal(value, deref);
|
|
|
|
}
|
2020-11-08 13:49:39 +01:00
|
|
|
}
|
|
|
|
|
2020-11-08 14:09:17 +01:00
|
|
|
return (reference, RefValue { value, symbolic });
|
2020-11-08 13:49:39 +01:00
|
|
|
}
|
2020-12-06 18:52:03 +01:00
|
|
|
|
|
|
|
pub fn fetch_object_if_missing(oid: String, remote_git_dir: String) {
|
|
|
|
if object_exists(oid.clone()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
|
|
|
let rgit_remote = remote_git_dir + "/.rgit";
|
|
|
|
fs::copy(
|
|
|
|
format!("{}/objects/{}", rgit_remote, oid.clone()),
|
|
|
|
format!("{}/objects/{}", dir, oid),
|
|
|
|
)
|
|
|
|
.expect(format!("Failed to fetch {}", oid).as_str());
|
|
|
|
}
|
|
|
|
|
2020-12-06 19:25:41 +01:00
|
|
|
pub fn push_object(oid: String, remote_git_dir: String) {
|
|
|
|
let rgit_remote = remote_git_dir + "/.rgit";
|
2020-12-06 19:39:04 +01:00
|
|
|
let remote_object = format!("{}/objects/{}", rgit_remote, oid.clone());
|
|
|
|
|
|
|
|
if Path::new(&remote_object).exists() {
|
|
|
|
// Only push object if it doesn't exist.
|
|
|
|
// Different implementation from the tutorial, the end result should be
|
|
|
|
// the same.
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2020-12-06 19:25:41 +01:00
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
2020-12-06 19:39:04 +01:00
|
|
|
fs::copy(format!("{}/objects/{}", dir, oid), remote_object)
|
|
|
|
.expect(format!("Failed to push {}", oid).as_str());
|
2020-12-06 19:25:41 +01:00
|
|
|
}
|
|
|
|
|
2020-12-06 18:52:03 +01:00
|
|
|
fn object_exists(oid: String) -> bool {
|
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
|
|
|
let path = format!("{}/objects/{}", dir.clone(), oid.clone());
|
|
|
|
return Path::new(path.as_str()).exists();
|
|
|
|
}
|
2020-12-07 20:12:53 +01:00
|
|
|
|
|
|
|
pub fn get_index() -> HashMap<String, String> {
|
|
|
|
let mut index_files = HashMap::new();
|
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
|
|
|
let index_path = format!("{}/index", dir.clone());
|
|
|
|
let path = Path::new(index_path.as_str());
|
|
|
|
|
|
|
|
if path.exists() {
|
|
|
|
let index_content = fs::read_to_string(path).expect("Failed to read index file");
|
|
|
|
let index: Index = serde_json::from_str(index_content.as_str()).unwrap();
|
|
|
|
index_files = index.files;
|
|
|
|
}
|
|
|
|
|
|
|
|
return index_files;
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn set_index(files: HashMap<String, String>) {
|
|
|
|
let new_index = Index { files: files };
|
|
|
|
let index_content = serde_json::to_string(&new_index).expect("Failed to serialize index");
|
|
|
|
|
|
|
|
let dir = RGIT_DIR.lock().unwrap().to_owned();
|
|
|
|
let index_path = format!("{}/index", dir.clone());
|
|
|
|
let path = Path::new(index_path.as_str());
|
|
|
|
|
|
|
|
fs::write(path, index_content).expect("Failed to write index");
|
|
|
|
}
|