Compare commits
33 Commits
d86b35a5fe
...
dev
| Author | SHA256 | Date | |
|---|---|---|---|
| 1523c973f4 | |||
| 5cf67648af | |||
| 839799a13f | |||
| 10c68bf260 | |||
| bab5e958cb | |||
| fc150be724 | |||
| aa3bf531f9 | |||
| 3f97ccee25 | |||
| 3ae6b86b8e | |||
| d83873c763 | |||
| de401b5474 | |||
| fde2615107 | |||
| 59e173c54f | |||
| 712cbc4788 | |||
| 207d166227 | |||
| 74b8c9cfae | |||
| 1b1834d4e6 | |||
| dfd3c07d2f | |||
| d094a60725 | |||
| 41ff025098 | |||
| ca2cdbfded | |||
| b4d7373933 | |||
| 2f4985c443 | |||
| 0a879cbfe9 | |||
| f60b58f2f2 | |||
| 6f618b2340 | |||
| 38940995b5 | |||
| f686ea65b1 | |||
| 23bfdefd30 | |||
| 38d281543e | |||
| a12506b8be | |||
| 43cfd788f3 | |||
| da42de5466 |
7
.gitignore
vendored
7
.gitignore
vendored
@@ -1,6 +1,11 @@
|
|||||||
# ignora log di h5tojson e jsontoh5
|
# ignores logs of h5tojson, jsontoh5
|
||||||
*.log
|
*.log
|
||||||
|
|
||||||
|
# ignores any output of main.py
|
||||||
|
output/*.json
|
||||||
|
output/*.h5
|
||||||
|
output/*.nxs
|
||||||
|
|
||||||
# ---> Python
|
# ---> Python
|
||||||
# Byte-compiled / optimized / DLL files
|
# Byte-compiled / optimized / DLL files
|
||||||
__pycache__/
|
__pycache__/
|
||||||
|
|||||||
0
output/placeholder
Normal file
0
output/placeholder
Normal file
@@ -1,2 +1,3 @@
|
|||||||
requests
|
requests
|
||||||
asyncio
|
asyncio
|
||||||
|
h5py
|
||||||
@@ -33,6 +33,8 @@ class APIHandler:
|
|||||||
raise ConnectionError(f"Invalid API key or authentication method.")
|
raise ConnectionError(f"Invalid API key or authentication method.")
|
||||||
case 404:
|
case 404:
|
||||||
raise ConnectionError(f"404: Not Found. This means there's no resource with this elabid (wrong elabid?) on your eLabFTW (wrong endpoint?).")
|
raise ConnectionError(f"404: Not Found. This means there's no resource with this elabid (wrong elabid?) on your eLabFTW (wrong endpoint?).")
|
||||||
|
case 400:
|
||||||
|
raise ConnectionError(f"400: Bad Request. This means the API endpoint you tried to reach is invalid. Did you tamper with the source code? If not, contact the developer.")
|
||||||
case _:
|
case _:
|
||||||
raise ConnectionError(f"HTTP request failed with status code: {response.status_code} (NOTE: 4xx means user's fault).")
|
raise ConnectionError(f"HTTP request failed with status code: {response.status_code} (NOTE: 4xx means user's fault).")
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -13,16 +13,17 @@ class Layer:
|
|||||||
'''
|
'''
|
||||||
def __init__(self, layer_data):
|
def __init__(self, layer_data):
|
||||||
try:
|
try:
|
||||||
|
self.operator = layer_data["fullname"]
|
||||||
self.extra = layer_data["metadata_decoded"]["extra_fields"]
|
self.extra = layer_data["metadata_decoded"]["extra_fields"]
|
||||||
self.layer_number = self.extra["Layer Progressive Number"]["value"] # integer
|
self.layer_number = self.extra["Layer Progressive Number"]["value"] # integer
|
||||||
self.target_elabid = self.extra["Target"]["value"] # elabid
|
self.target_elabid = self.extra["Target"]["value"] # elabid
|
||||||
self.rheed_system_elabid = self.extra["RHEED System"]["value"] # elabid
|
|
||||||
self.laser_system_elabid = self.extra["Laser System"]["value"] # elabid
|
self.laser_system_elabid = self.extra["Laser System"]["value"] # elabid
|
||||||
self.start_time = layer_data.get("created_at")
|
self.chamber_elabid = self.extra["Chamber"]["value"] # elabid
|
||||||
self.operator = layer_data.get("fullname")
|
self.rheed_system_elabid = self.extra["RHEED System"]["value"] # elabid
|
||||||
self.description = layer_data.get("body")
|
|
||||||
self.deposition_time = self.extra["Duration"]["value"]
|
self.deposition_time = self.extra["Duration"]["value"]
|
||||||
|
self.deposition_time_unit = self.extra["Duration"]["unit"]
|
||||||
self.repetition_rate = self.extra["Repetition rate"]["value"]
|
self.repetition_rate = self.extra["Repetition rate"]["value"]
|
||||||
|
self.repetition_rate_unit = self.extra["Repetition rate"]["unit"]
|
||||||
try:
|
try:
|
||||||
self.number_of_pulses = (float(self.deposition_time) * float(self.repetition_rate)).__floor__()
|
self.number_of_pulses = (float(self.deposition_time) * float(self.repetition_rate)).__floor__()
|
||||||
except ValueError:
|
except ValueError:
|
||||||
@@ -33,15 +34,22 @@ class Layer:
|
|||||||
Please edit your eLabFTW entry and retry.
|
Please edit your eLabFTW entry and retry.
|
||||||
""")
|
""")
|
||||||
self.temperature = self.extra["Heater temperature"]["value"] # Note: this field used to have a trailing space in its name
|
self.temperature = self.extra["Heater temperature"]["value"] # Note: this field used to have a trailing space in its name
|
||||||
|
self.temperature_unit = self.extra["Heater temperature"]["unit"]
|
||||||
self.process_pressure = self.extra["Process pressure"]["value"] # Note: this field used to have a trailing space in its name
|
self.process_pressure = self.extra["Process pressure"]["value"] # Note: this field used to have a trailing space in its name
|
||||||
|
self.process_pressure_unit = self.extra["Process pressure"]["unit"]
|
||||||
self.heating_method = self.extra["Heating Method"]["value"]
|
self.heating_method = self.extra["Heating Method"]["value"]
|
||||||
self.layer_thickness = self.extra["Thickness"]["value"]
|
self.layer_thickness = self.extra["Thickness"]["value"]
|
||||||
|
self.layer_thickness_unit = self.extra["Thickness"]["unit"]
|
||||||
self.buffer_gas = self.extra["Buffer gas"]["value"]
|
self.buffer_gas = self.extra["Buffer gas"]["value"]
|
||||||
self.heater_target_distance = self.extra["Heater-target distance"]["value"]
|
self.heater_target_distance = self.extra["Heater-target distance"]["value"]
|
||||||
|
self.heater_target_distance_unit = self.extra["Heater-target distance"]["unit"]
|
||||||
self.laser_fluence = self.extra["Laser Intensity"]["value"] # here fluence = intensity
|
self.laser_fluence = self.extra["Laser Intensity"]["value"] # here fluence = intensity
|
||||||
|
self.laser_fluence_unit = "J/(s cm^2)"
|
||||||
self.laser_spot_area = self.extra["Spot Area"]["value"]
|
self.laser_spot_area = self.extra["Spot Area"]["value"]
|
||||||
|
self.laser_spot_area_unit = "mm^2"
|
||||||
try:
|
try:
|
||||||
self.laser_energy = (float(self.laser_fluence) * float(self.laser_spot_area)).__round__(3)
|
self.laser_energy = ( float(self.laser_fluence) * float(self.laser_spot_area) / 100 ).__round__(3)
|
||||||
|
self.laser_energy_unit = "J/s"
|
||||||
except ValueError:
|
except ValueError:
|
||||||
# Since laser_energy is NOT required, if it can't be calculated warn user but allow the software to continue execution:
|
# Since laser_energy is NOT required, if it can't be calculated warn user but allow the software to continue execution:
|
||||||
print("""
|
print("""
|
||||||
@@ -60,17 +68,37 @@ class Layer:
|
|||||||
self.pre_annealing_pressure = self.extra["Process pressure Pre"]["value"]
|
self.pre_annealing_pressure = self.extra["Process pressure Pre"]["value"]
|
||||||
self.pre_annealing_temperature = self.extra["Heater temperature Pre"]["value"]
|
self.pre_annealing_temperature = self.extra["Heater temperature Pre"]["value"]
|
||||||
self.pre_annealing_duration = self.extra["Duration Pre"]["value"]
|
self.pre_annealing_duration = self.extra["Duration Pre"]["value"]
|
||||||
|
self.pre_annealing_pressure_unit = self.extra["Process pressure Pre"]["unit"]
|
||||||
|
self.pre_annealing_temperature_unit = self.extra["Heater temperature Pre"]["unit"]
|
||||||
|
self.pre_annealing_duration_unit = self.extra["Duration Pre"]["unit"]
|
||||||
# Post annealing section
|
# Post annealing section
|
||||||
self.post_annealing_ambient_gas = self.extra["Buffer gas PA"]["value"]
|
self.post_annealing_ambient_gas = self.extra["Buffer gas PA"]["value"]
|
||||||
self.post_annealing_pressure = self.extra["Process pressure PA"]["value"]
|
self.post_annealing_pressure = self.extra["Process pressure PA"]["value"]
|
||||||
self.post_annealing_temperature = self.extra["Heater temperature PA"]["value"]
|
self.post_annealing_temperature = self.extra["Heater temperature PA"]["value"]
|
||||||
self.post_annealing_duration = self.extra["Duration PA"]["value"]
|
self.post_annealing_duration = self.extra["Duration PA"]["value"]
|
||||||
|
self.post_annealing_pressure_unit = self.extra["Process pressure PA"]["unit"]
|
||||||
|
self.post_annealing_temperature_unit = self.extra["Heater temperature PA"]["unit"]
|
||||||
|
self.post_annealing_duration_unit = self.extra["Duration PA"]["unit"]
|
||||||
|
|
||||||
# Rejected but suggested by the NeXus standard:
|
# Rejected but suggested by the NeXus standard:
|
||||||
#self.laser_rastering_coefficients = None
|
#self.laser_rastering_coefficients = None
|
||||||
except KeyError as k:
|
except KeyError as k:
|
||||||
# Some keys are not required and can be called through the .get() method - which is permissive and allows null values;
|
# Some keys are not required and can be called through the .get() method - which is permissive and allows null values;
|
||||||
# Other keys are required so if they can't be called (invalid or null) raise error and stop execution of the program:
|
# Other keys are required so if they can't be called (invalid or null) raise error and stop execution of the program:
|
||||||
raise KeyError(f"The provided dictionary lacks a \"{k}\" key. Check the deposition layer entry on eLabFTW and make sure you used the correct Experiment template.")
|
raise KeyError(f"The provided dictionary lacks a \"{k}\" key. Check the deposition layer entry on eLabFTW and make sure you used the correct Experiment template.")
|
||||||
|
# Optional
|
||||||
|
self.start_time = layer_data.get("created_at") or None
|
||||||
|
self.description = layer_data.get("body") or None
|
||||||
|
def get_instruments(self, apikey):
|
||||||
|
raw_lasersys_data = APIHandler(apikey).get_entry_from_elabid(self.laser_system_elabid, entryType="items")
|
||||||
|
raw_chamber_data = APIHandler(apikey).get_entry_from_elabid(self.chamber_elabid, entryType="items")
|
||||||
|
raw_rheedsys_data = APIHandler(apikey).get_entry_from_elabid(self.rheed_system_elabid, entryType="items")
|
||||||
|
instruments_used = {
|
||||||
|
"laser_system": raw_lasersys_data.get("title") or None,
|
||||||
|
"deposition_chamber": raw_chamber_data.get("title") or None,
|
||||||
|
"rheed_system": raw_rheedsys_data.get("title") or None,
|
||||||
|
}
|
||||||
|
return instruments_used
|
||||||
|
|
||||||
class Entrypoint:
|
class Entrypoint:
|
||||||
'''
|
'''
|
||||||
@@ -111,7 +139,13 @@ class Material:
|
|||||||
self.name = material_data["title"] # required
|
self.name = material_data["title"] # required
|
||||||
self.extra = material_data["metadata_decoded"]["extra_fields"]
|
self.extra = material_data["metadata_decoded"]["extra_fields"]
|
||||||
self.compound_elabid = self.extra["Compound"]["value"]
|
self.compound_elabid = self.extra["Compound"]["value"]
|
||||||
self.dimensions = self.extra["Size"]["value"]
|
self.dimensions = str(self.extra["Size"]["value"]) # strings have a .count() method
|
||||||
|
if self.dimensions.count("mm") == 2:
|
||||||
|
self.dimensions_unit = "mm x mm"
|
||||||
|
elif self.dimensions[-1] == '"':
|
||||||
|
self.dimensions_unit = "inches"
|
||||||
|
else:
|
||||||
|
self.dimensions_unit = None
|
||||||
except KeyError as k:
|
except KeyError as k:
|
||||||
# Some keys are not required and can be called through the .get() method - which is permissive and allows null values;
|
# Some keys are not required and can be called through the .get() method - which is permissive and allows null values;
|
||||||
# Other keys are required so if they can't be called (invalid or null) raise error and stop execution of the program:
|
# Other keys are required so if they can't be called (invalid or null) raise error and stop execution of the program:
|
||||||
@@ -138,9 +172,11 @@ class Substrate(Material):
|
|||||||
try:
|
try:
|
||||||
self.orientation = self.extra["Orientation"]["value"]
|
self.orientation = self.extra["Orientation"]["value"]
|
||||||
self.miscut_angle = self.extra["Miscut Angle"]["value"]
|
self.miscut_angle = self.extra["Miscut Angle"]["value"]
|
||||||
|
self.miscut_angle_unit = self.extra["Miscut Angle"]["unit"]
|
||||||
self.miscut_direction = self.extra["Miscut Direction"]["value"]
|
self.miscut_direction = self.extra["Miscut Direction"]["value"]
|
||||||
# Not present (yet) on eLabFTW for Substrates:
|
# Not present (yet) on eLabFTW for Substrates:
|
||||||
self.thickness = None #self.extra["Thickness"]["value"]
|
self.thickness = "" #self.extra["Thickness"]["value"]
|
||||||
|
self.thickness_unit = "μm" #self.extra["Thickness"]["unit"]
|
||||||
self.surface_treatment = self.extra["Surface treatment"]["value"]
|
self.surface_treatment = self.extra["Surface treatment"]["value"]
|
||||||
self.manufacturer = self.extra["Supplier"]["value"]
|
self.manufacturer = self.extra["Supplier"]["value"]
|
||||||
self.batch_id = self.extra["Batch ID"]["value"]
|
self.batch_id = self.extra["Batch ID"]["value"]
|
||||||
@@ -152,11 +188,14 @@ class Target(Material):
|
|||||||
super().__init__(material_data)
|
super().__init__(material_data)
|
||||||
try:
|
try:
|
||||||
self.thickness = self.extra["Thickness"]["value"]
|
self.thickness = self.extra["Thickness"]["value"]
|
||||||
|
self.thickness_unit = self.extra["Thickness"]["unit"]
|
||||||
self.shape = self.extra["shape"]["value"]
|
self.shape = self.extra["shape"]["value"]
|
||||||
self.solid_form = self.extra["Solid form"]["value"]
|
self.solid_form = self.extra["Solid form"]["value"]
|
||||||
self.manufacturer = self.extra["Supplier"]["value"]
|
self.manufacturer = self.extra["Supplier"]["value"]
|
||||||
except KeyError as k:
|
except KeyError as k:
|
||||||
raise KeyError(f"The provided dictionary lacks a \"{k}\" key - which is specific for PLD targets. Check the {self.name} target entry on eLabFTW and make sure you used the correct Resource template.")
|
raise KeyError(f"The provided dictionary lacks a \"{k}\" key - which is specific for PLD targets. Check the {self.name} target entry on eLabFTW and make sure you used the correct Resource template.")
|
||||||
|
# Non-required attributes:
|
||||||
|
self.description = material_data.get("body") or ""
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,62 +0,0 @@
|
|||||||
"""
|
|
||||||
Currently unused!
|
|
||||||
"""
|
|
||||||
import json, requests
|
|
||||||
from APIHandler import APIHandler
|
|
||||||
|
|
||||||
def get_entry_from_elabid(elabid, entryType="items"):
|
|
||||||
'''
|
|
||||||
Function which returns entrypoint data (as dictionary) from its elabid.
|
|
||||||
'''
|
|
||||||
header = APIHandler(apikey).dump
|
|
||||||
response = requests.get(
|
|
||||||
headers = header,
|
|
||||||
url = f"{ELABFTW_API_URL}/{entryType}/{elabid}",
|
|
||||||
verify=True
|
|
||||||
)
|
|
||||||
if response.status_code // 100 in [2,3]:
|
|
||||||
entry_data = response.json()
|
|
||||||
return entry_data
|
|
||||||
else:
|
|
||||||
raise ConnectionError(f"HTTP request failed with status code: {response.status_code}.")
|
|
||||||
|
|
||||||
def get_sample_layers_data(elabid):
|
|
||||||
'''
|
|
||||||
Return the following data from every eLabFTW experiment linked
|
|
||||||
to a certain sample, identified by elabid.
|
|
||||||
|
|
||||||
- Title of the experiment
|
|
||||||
- Category (should check it's "PLD Deposition")
|
|
||||||
- Layer number - if present (PLD depositions)
|
|
||||||
- Deposition time - returns error if not present
|
|
||||||
- Repetition rate - returns error if not present
|
|
||||||
'''
|
|
||||||
# header = {
|
|
||||||
# "Authorization": apikey,
|
|
||||||
# "Content-Type": "application/json"
|
|
||||||
# }
|
|
||||||
sample_data = requests.get(
|
|
||||||
headers = header,
|
|
||||||
url = f"https://elabftw.fisica.unina.it/api/v2/items/{elabid}",
|
|
||||||
verify=True
|
|
||||||
).json()
|
|
||||||
related_experiments = sample_data["related_experiments_links"]
|
|
||||||
result = []
|
|
||||||
for exp in related_experiments:
|
|
||||||
experiment_data = requests.get(
|
|
||||||
headers = header,
|
|
||||||
url = f"https://elabftw.fisica.unina.it/api/v2/experiments/{exp.get("entityid")}",
|
|
||||||
verify=True
|
|
||||||
).json()
|
|
||||||
extra = experiment_data["metadata_decoded"]["extra_fields"]
|
|
||||||
result.append(
|
|
||||||
{"title": exp.get("title"),
|
|
||||||
"layer_number": extra.get("Layer Progressive Number").get("value"),
|
|
||||||
"category": exp.get("category_title"),
|
|
||||||
"deposition_time": extra.get("Duration").get("value"),
|
|
||||||
"repetition_rate": extra.get("Repetition rate").get("value")}
|
|
||||||
)
|
|
||||||
return result
|
|
||||||
|
|
||||||
if __name__=="__main__":
|
|
||||||
print("Warning: you're not supposed to be running this as the main program.")
|
|
||||||
516
src/main.py
516
src/main.py
@@ -1,4 +1,5 @@
|
|||||||
import os, json, requests
|
import os, json, requests, h5py
|
||||||
|
import numpy as np
|
||||||
from getpass import getpass
|
from getpass import getpass
|
||||||
from APIHandler import APIHandler
|
from APIHandler import APIHandler
|
||||||
from classes import *
|
from classes import *
|
||||||
@@ -98,71 +99,464 @@ def chain_layer_to_target(layer_object):
|
|||||||
material_object = call_material_from_elabid(target_elabid)
|
material_object = call_material_from_elabid(target_elabid)
|
||||||
return material_object
|
return material_object
|
||||||
|
|
||||||
#sample_object = call_entrypoint_from_elabid(elabid)
|
def deduplicate_instruments_from_layers(layers):
|
||||||
#from_entrypoint_to_material(sample_object)
|
'''
|
||||||
|
Takes a list of Layer-class objects and for each layer gets the instruments used (laser, depo chamber and RHEED), returns dictionary with one item per category. This means that if more layers share the same instruments it returns a dictionary with just their names as strings (no lists or sub-dictionaries).
|
||||||
|
|
||||||
|
If different layers have different instruments (e.g. laser systems) the user is prompted to only select one.
|
||||||
|
'''
|
||||||
|
lasers = []
|
||||||
|
chambers = []
|
||||||
|
rheeds = []
|
||||||
|
elegant_dict = {}
|
||||||
|
for lyr in layers:
|
||||||
|
instruments = lyr.get_instruments(apikey)
|
||||||
|
lasers.append(instruments["laser_system"])
|
||||||
|
chambers.append(instruments["deposition_chamber"])
|
||||||
|
rheeds.append(instruments["rheed_system"])
|
||||||
|
elegant_dict[f"layer_{lyr.layer_number}"] = {
|
||||||
|
"laser_system": instruments["laser_system"],
|
||||||
|
"deposition_chamber": instruments["deposition_chamber"],
|
||||||
|
"rheed_system": instruments["rheed_system"],
|
||||||
|
}
|
||||||
|
ded_lasers = list( set( lasers ) )
|
||||||
|
ded_chambers = list( set( chambers ) )
|
||||||
|
ded_rheeds = list( set( rheeds ) )
|
||||||
|
elegant_dict["multilayer"] = {
|
||||||
|
# Keep key names human readable since they're used in the messages of the following errors
|
||||||
|
"laser_system": ", ".join(ded_lasers),
|
||||||
|
"deposition_chamber": ", ".join(ded_chambers),
|
||||||
|
"rheed_system": ", ".join(ded_rheeds)
|
||||||
|
} # dictionary's name is a joke
|
||||||
|
# updated_dict = {} # use this for containing the final dataset
|
||||||
|
# for ded in elegant_dict:
|
||||||
|
# if len(elegant_dict[ded]) == 0:
|
||||||
|
# # if len of list is 0 - empty list - raise error
|
||||||
|
# raise IndexError(f"Missing data: no Laser System, Chamber and/or RHEED System is specified in any of the Deposition-type experiments related to this sample. Fix this on eLabFTW before retrying. Affected list: {ded}.")
|
||||||
|
# elif len(elegant_dict[ded]) > 1:
|
||||||
|
# # if len of list is > 1 - too many values - allow the user to pick one
|
||||||
|
# print("Warning: different instruments have been used for different layers - which is currently not allowed.")
|
||||||
|
# # there's a better way to do this but I can't remember now for the life of me...
|
||||||
|
# i = 0
|
||||||
|
# while i < len(elegant_dict[ded]):
|
||||||
|
# print(f"{i} - {elegant_dict[ded][i]}")
|
||||||
|
# i += 1
|
||||||
|
# ans = None
|
||||||
|
# while not type(ans) == int or not ans in range(0, len(elegant_dict[ded])):
|
||||||
|
# ans = input("Please pick one of the previous (0, 1, ...) [default = 0]: ") or "0"
|
||||||
|
# if ans.isdigit():
|
||||||
|
# ans = int(ans)
|
||||||
|
# continue # unnecessary?
|
||||||
|
# updated_dict[ded] = elegant_dict[ded][ans]
|
||||||
|
# elif elegant_dict[ded][0] in ["", 0, None]:
|
||||||
|
# # if len is 1 BUT value is "", 0 or None raise error
|
||||||
|
# raise ValueError(f"Missing data: a Laser System, Chamber and/or RHEED System which is specified across all the Deposition-type experiments related to this sample is either empty or invalid. Fix this on eLabFTW before retrying. Affected list: {ded}.")
|
||||||
|
# else:
|
||||||
|
# # if none of the previous (only 1 value), that single value is used
|
||||||
|
# updated_dict[ded] = elegant_dict[ded][0]
|
||||||
|
# instruments_used_dict = {
|
||||||
|
# "laser_system": updated_dict["Laser Systems"],
|
||||||
|
# "deposition_chamber": updated_dict["Deposition Chamber"],
|
||||||
|
# "rheed_system": updated_dict["RHEED Systems"],
|
||||||
|
# }
|
||||||
|
return elegant_dict
|
||||||
|
|
||||||
|
### OLD CODE
|
||||||
|
# if 0 in [ len(i) for i in elegant_list ]:
|
||||||
|
# # i.e. if length of one of the lists in elegant_list is zero (missing data):
|
||||||
|
# raise IndexError("Missing data: no Laser System, Chamber and/or RHEED System is specified in any of the Deposition-type experiments related to this sample.")
|
||||||
|
# if not all([ len(i) == 1 for i in elegant_list ]):
|
||||||
|
# print("Warning: different instruments have been used for different layers - which is currently not allowed.")
|
||||||
|
# # for every element in elegant list check if len > 1 and if it is
|
||||||
|
# print("Selecting the first occurence for every category...")
|
||||||
|
###
|
||||||
|
# lasers = { f"layer_{lyr.layer_number}": lyr.laser_system for lyr in layers }
|
||||||
|
# chambers = { f"layer_{lyr.layer_number}": lyr.deposition_chamber for lyr in layers }
|
||||||
|
# rheeds = { f"layer_{lyr.layer_number}": lyr.rheed_system for lyr in layers }
|
||||||
|
# instruments_used_dict = {
|
||||||
|
# "laser_system": lasers,
|
||||||
|
# "deposition_chamber": chambers,
|
||||||
|
# "rheed_system": rheeds,
|
||||||
|
# }
|
||||||
|
|
||||||
|
def analyse_rheed_data(data):
|
||||||
|
'''
|
||||||
|
Takes the content of a tsv file and returns a dictionary with timestamps and intensities.
|
||||||
|
The file should contain a 2D array composed of 4 columns - where the first column is a timestamp and the other three are RHEED intensities - and an unspecified number of rows.
|
||||||
|
|
||||||
|
-----
|
||||||
|
Time Layer1_Int1 Layer1_Int2 Layer1_Int3
|
||||||
|
-----
|
||||||
|
|
||||||
|
Distinct ValueErrors are raised if:
|
||||||
|
- The array is not 2-dimensional;
|
||||||
|
- The total number of columns does not equate exactly 1+3 (= 4).
|
||||||
|
|
||||||
|
Time is expressed in seconds, intensities are normalized (adimensional).
|
||||||
|
|
||||||
|
# TO-DO: complete this description...
|
||||||
|
Written with help from DeepSeek.
|
||||||
|
'''
|
||||||
|
# Verifying the format of the input file:
|
||||||
|
if data.ndim != 2:
|
||||||
|
raise ValueError(f"Unexpected trace format: expected 2D array, got ndim = {data.ndim}.")
|
||||||
|
n_cols = data.shape[1] # 0 = rows, 1 = columns
|
||||||
|
if n_cols > 4:
|
||||||
|
print(f"Warning! The input file (for Realtime Window Analysis) has {n_cols-4} more than needed.\nOnly 4 columns will be considered - with the first representing time and the others representing RHEED intensities.")
|
||||||
|
if n_cols < 4:
|
||||||
|
raise ValueError(f"Insufficient number of columns: expected 4, got n_cols = {n_cols}.")
|
||||||
|
n_time_points = data.shape[0]
|
||||||
|
|
||||||
|
# Get time (all rows of col 0) as Float64:
|
||||||
|
time = data[:, 0].astype(np.float64, copy=False) # copy=False suggested by LLM for mem. eff.
|
||||||
|
|
||||||
|
# Get intensities (all rows of cols 1,2,3) as Float32:
|
||||||
|
intensities = data[:, 1:4].astype(np.float32, copy=False)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"time": time,
|
||||||
|
"intensity": intensities,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def make_nexus_schema_dictionary(substrate_object, layers):
|
||||||
|
'''
|
||||||
|
Main function, takes all the other functions to reconstruct the full dataset. Takes a Substrate-class object (output of the chain_entrypoint_to_batch() function) and a list of Layer-class objects (output of the chain_entrypoint_to_layers() function), returns dictionary with the same schema as the NeXus standard for PLD fabrications.
|
||||||
|
'''
|
||||||
|
instruments = deduplicate_instruments_from_layers(layers)
|
||||||
|
pld_fabrication = {
|
||||||
|
"sample": {
|
||||||
|
"substrate": {
|
||||||
|
"name": substrate_object.name,
|
||||||
|
"chemical_formula" : substrate_object.get_compound_formula(apikey),
|
||||||
|
"orientation" : substrate_object.orientation,
|
||||||
|
"miscut_angle" : {
|
||||||
|
"value": substrate_object.miscut_angle,
|
||||||
|
"units": substrate_object.miscut_angle_unit
|
||||||
|
},
|
||||||
|
"miscut_direction" : substrate_object.miscut_direction,
|
||||||
|
"thickness" : {
|
||||||
|
"value": substrate_object.thickness,
|
||||||
|
"units": substrate_object.thickness_unit,
|
||||||
|
},
|
||||||
|
"dimensions" : substrate_object.dimensions,
|
||||||
|
"surface_treatment" : substrate_object.surface_treatment,
|
||||||
|
"manufacturer" : substrate_object.manufacturer,
|
||||||
|
"batch_id" : substrate_object.batch_id,
|
||||||
|
},
|
||||||
|
"multilayer": {},
|
||||||
|
},
|
||||||
|
"instruments_used": instruments["multilayer"],
|
||||||
|
}
|
||||||
|
multilayer = pld_fabrication["sample"]["multilayer"]
|
||||||
|
for layer in layers:
|
||||||
|
name = "layer_" + layer.layer_number
|
||||||
|
target_object = chain_layer_to_target(layer)
|
||||||
|
target_dict = {
|
||||||
|
"name": target_object.name,
|
||||||
|
"chemical_formula" : target_object.get_compound_formula(apikey),
|
||||||
|
"description" : target_object.description,
|
||||||
|
"shape" : target_object.shape,
|
||||||
|
"dimensions" : target_object.dimensions,
|
||||||
|
"thickness" : {
|
||||||
|
"value": target_object.thickness,
|
||||||
|
"units": target_object.thickness_unit,
|
||||||
|
},
|
||||||
|
"solid_form" : target_object.solid_form,
|
||||||
|
"manufacturer" : target_object.manufacturer,
|
||||||
|
"batch_id" : target_object.name,
|
||||||
|
# TO-DO: currently not available:
|
||||||
|
}
|
||||||
|
multilayer[name] = {
|
||||||
|
"target": target_dict,
|
||||||
|
"start_time": layer.start_time,
|
||||||
|
"operator": layer.operator,
|
||||||
|
"description": layer.description,
|
||||||
|
"number_of_pulses": layer.number_of_pulses,
|
||||||
|
"deposition_time": {
|
||||||
|
"value": layer.deposition_time,
|
||||||
|
"units": layer.deposition_time_unit,
|
||||||
|
},
|
||||||
|
"temperature": {
|
||||||
|
"value": layer.temperature,
|
||||||
|
"units": layer.temperature_unit,
|
||||||
|
},
|
||||||
|
"heating_method": layer.heating_method,
|
||||||
|
"layer_thickness": {
|
||||||
|
"value": layer.layer_thickness,
|
||||||
|
"units": layer.layer_thickness_unit,
|
||||||
|
},
|
||||||
|
"buffer_gas": layer.buffer_gas,
|
||||||
|
"process_pressure": {
|
||||||
|
"value": layer.process_pressure,
|
||||||
|
"units": layer.process_pressure_unit,
|
||||||
|
},
|
||||||
|
"heater_target_distance": {
|
||||||
|
"value": layer.heater_target_distance,
|
||||||
|
"units": layer.heater_target_distance_unit,
|
||||||
|
},
|
||||||
|
"repetition_rate": {
|
||||||
|
"value": layer.repetition_rate,
|
||||||
|
"units": layer.repetition_rate_unit,
|
||||||
|
},
|
||||||
|
"laser_fluence": {
|
||||||
|
"value": layer.laser_fluence,
|
||||||
|
"units": layer.laser_fluence_unit,
|
||||||
|
},
|
||||||
|
"laser_spot_area": {
|
||||||
|
"value": layer.laser_spot_area,
|
||||||
|
"units": layer.laser_spot_area_unit,
|
||||||
|
},
|
||||||
|
"laser_energy": {
|
||||||
|
"value": layer.laser_energy,
|
||||||
|
"units": layer.laser_energy_unit,
|
||||||
|
},
|
||||||
|
"laser_rastering": {
|
||||||
|
"geometry": layer.laser_rastering_geometry,
|
||||||
|
"positions": layer.laser_rastering_positions,
|
||||||
|
"velocities": layer.laser_rastering_velocities,
|
||||||
|
},
|
||||||
|
"pre_annealing": {
|
||||||
|
"ambient_gas": layer.pre_annealing_ambient_gas,
|
||||||
|
"pressure": {
|
||||||
|
"value": layer.pre_annealing_pressure,
|
||||||
|
"units": layer.pre_annealing_pressure_unit,
|
||||||
|
},
|
||||||
|
"temperature": {
|
||||||
|
"value": layer.pre_annealing_temperature,
|
||||||
|
"units": layer.pre_annealing_temperature_unit,
|
||||||
|
},
|
||||||
|
"duration": {
|
||||||
|
"value": layer.pre_annealing_duration,
|
||||||
|
"units": layer.pre_annealing_duration_unit,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"post_annealing": {
|
||||||
|
"ambient_gas": layer.post_annealing_ambient_gas,
|
||||||
|
"pressure": {
|
||||||
|
"value": layer.post_annealing_pressure,
|
||||||
|
"units": layer.post_annealing_pressure_unit,
|
||||||
|
},
|
||||||
|
"temperature": {
|
||||||
|
"value": layer.post_annealing_temperature,
|
||||||
|
"units": layer.post_annealing_temperature_unit,
|
||||||
|
},
|
||||||
|
"duration": {
|
||||||
|
"value": layer.post_annealing_duration,
|
||||||
|
"units": layer.post_annealing_duration_unit,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"instruments_used": instruments[name],
|
||||||
|
}
|
||||||
|
return pld_fabrication
|
||||||
|
|
||||||
|
def _dataset_with_units(group, name, quantity):
    """Create dataset *name* in *group* from a ``{"value": ..., "units": ...}`` mapping.

    Returns the created h5py dataset with its ``units`` attribute set.
    """
    ds = group.create_dataset(name, data=quantity["value"])
    ds.attrs["units"] = quantity["units"]
    return ds


def build_nexus_file(pld_fabrication, output_path, rheed_osc=None):
    """Serialize the PLD fabrication dictionary into a NeXus-style HDF5 file.

    Parameters
    ----------
    pld_fabrication : dict
        Nested dictionary (as produced by ``make_nexus_schema_dictionary``)
        with ``"sample"`` (substrate + multilayer) and ``"instruments_used"``
        sections.
    output_path : str
        Path of the HDF5 file to create; an existing file is overwritten.
    rheed_osc : dict | None, optional
        RHEED oscillation data with ``"time"`` and ``"intensity"`` entries;
        when given, an NXdata group is appended.
        # assumes intensity has shape (n_layers, n_timepoints, 3) — TODO confirm

    Raises
    ------
    TypeError
        Propagated unchanged (original traceback preserved) when a field
        holds a value h5py cannot store, e.g. ``None``.
    """
    # NOTE: look at the mail attachment from Emiliano...
    with h5py.File(output_path, "w") as f:
        nx_pld_entry = f.create_group("pld_fabrication")
        nx_pld_entry.attrs["NX_class"] = "NXentry"

        # Sample section
        nx_sample = nx_pld_entry.create_group("sample")
        nx_sample.attrs["NX_class"] = "NXsample"
        sample_dict = pld_fabrication["sample"]

        # Substrate sub-section
        nx_substrate = nx_sample.create_group("substrate")
        nx_substrate.attrs["NX_class"] = "NXsubentry"
        substrate_dict = sample_dict["substrate"]
        try:
            # Substrate fields (datasets)
            nx_substrate.create_dataset("name", data=substrate_dict["name"])
            nx_substrate.create_dataset("chemical_formula", data=substrate_dict["chemical_formula"])
            nx_substrate.create_dataset("orientation", data=substrate_dict["orientation"])
            _dataset_with_units(nx_substrate, "miscut_angle", substrate_dict["miscut_angle"])  # float
            nx_substrate.create_dataset("miscut_direction", data=substrate_dict["miscut_direction"])
            _dataset_with_units(nx_substrate, "thickness", substrate_dict["thickness"])  # float/int
            nx_substrate.create_dataset("dimensions", data=substrate_dict["dimensions"])
            nx_substrate.create_dataset("surface_treatment", data=substrate_dict["surface_treatment"])
            nx_substrate.create_dataset("manufacturer", data=substrate_dict["manufacturer"])
            nx_substrate.create_dataset("batch_id", data=substrate_dict["batch_id"])
        except TypeError:
            # sooner or later I'll handle this too - not today tho;
            # bare `raise` keeps the original traceback instead of wrapping it
            raise

        # Multilayer sub-section
        nx_multilayer = nx_sample.create_group("multilayer")
        nx_multilayer.attrs["NX_class"] = "NXsubentry"
        multilayer_dict = sample_dict["multilayer"]
        # Repeat for each layer of the multilayer
        for layer in multilayer_dict:
            nx_layer = nx_multilayer.create_group(layer)
            nx_layer.attrs["NX_class"] = "NXsubentry"
            layer_dict = multilayer_dict[layer]

            # Sub-groups of a layer
            ## Target
            nx_target = nx_layer.create_group("target")
            nx_target.attrs["NX_class"] = "NXsample"
            target_dict = layer_dict["target"]
            ## Rastering and Annealing
            nx_laser_rastering = nx_layer.create_group("laser_rastering")
            nx_laser_rastering.attrs["NX_class"] = "NXprocess"
            rastering_dict = layer_dict["laser_rastering"]
            nx_pre_annealing = nx_layer.create_group("pre_annealing")
            nx_pre_annealing.attrs["NX_class"] = "NXprocess"
            pre_ann_dict = layer_dict["pre_annealing"]
            nx_post_annealing = nx_layer.create_group("post_annealing")
            nx_post_annealing.attrs["NX_class"] = "NXprocess"
            post_ann_dict = layer_dict["post_annealing"]
            nx_layer_instruments = nx_layer.create_group("instruments_used")
            nx_layer_instruments.attrs["NX_class"] = "NXinstrument"
            layer_instruments_dict = layer_dict["instruments_used"]

            ## Target metadata
            try:
                nx_target.create_dataset("name", data=target_dict["name"])
                nx_target.create_dataset("chemical_formula", data=target_dict["chemical_formula"])
                nx_target.create_dataset("description", data=target_dict["description"])
                nx_target.create_dataset("shape", data=target_dict["shape"])
                nx_target.create_dataset("dimensions", data=target_dict["dimensions"])
                _dataset_with_units(nx_target, "thickness", target_dict["thickness"])  # float/int
                nx_target.create_dataset("solid_form", data=target_dict["solid_form"])
                nx_target.create_dataset("manufacturer", data=target_dict["manufacturer"])
                nx_target.create_dataset("batch_id", data=target_dict["batch_id"])
            except TypeError:
                raise

            ## Other layer-specific metadata (scalars plus value/units pairs)
            try:
                nx_layer.create_dataset("start_time", data=layer_dict["start_time"])
                nx_layer.create_dataset("operator", data=layer_dict["operator"])
                nx_layer.create_dataset("number_of_pulses", data=layer_dict["number_of_pulses"])
                _dataset_with_units(nx_layer, "deposition_time", layer_dict["deposition_time"])
                _dataset_with_units(nx_layer, "repetition_rate", layer_dict["repetition_rate"])
                _dataset_with_units(nx_layer, "temperature", layer_dict["temperature"])
                nx_layer.create_dataset("heating_method", data=layer_dict["heating_method"])
                _dataset_with_units(nx_layer, "layer_thickness", layer_dict["layer_thickness"])
                nx_layer.create_dataset("buffer_gas", data=layer_dict["buffer_gas"])
                _dataset_with_units(nx_layer, "process_pressure", layer_dict["process_pressure"])
                _dataset_with_units(nx_layer, "heater_target_distance", layer_dict["heater_target_distance"])
                _dataset_with_units(nx_layer, "laser_fluence", layer_dict["laser_fluence"])
                _dataset_with_units(nx_layer, "laser_spot_area", layer_dict["laser_spot_area"])
                _dataset_with_units(nx_layer, "laser_energy", layer_dict["laser_energy"])
            except TypeError:
                raise

            ## Rastering metadata
            try:
                nx_laser_rastering.create_dataset("geometry", data=rastering_dict["geometry"])
                nx_laser_rastering.create_dataset("positions", data=rastering_dict["positions"])
                nx_laser_rastering.create_dataset("velocities", data=rastering_dict["velocities"])
            except TypeError:
                raise

            ## Annealing metadata — identical structure for pre- and post-annealing
            for nx_ann, ann_dict in ((nx_pre_annealing, pre_ann_dict),
                                     (nx_post_annealing, post_ann_dict)):
                try:
                    nx_ann.create_dataset("ambient_gas", data=ann_dict["ambient_gas"])
                    _dataset_with_units(nx_ann, "pressure", ann_dict["pressure"])
                    _dataset_with_units(nx_ann, "temperature", ann_dict["temperature"])
                    _dataset_with_units(nx_ann, "duration", ann_dict["duration"])
                except TypeError:
                    raise

            ## Per-layer instruments
            try:
                nx_layer_instruments.create_dataset("laser_system", data=layer_instruments_dict["laser_system"])
                nx_layer_instruments.create_dataset("deposition_chamber", data=layer_instruments_dict["deposition_chamber"])
                nx_layer_instruments.create_dataset("rheed_system", data=layer_instruments_dict["rheed_system"])
            except TypeError:
                raise

        # Instruments used section (whole-fabrication level)
        nx_instruments = nx_pld_entry.create_group("instruments_used")
        nx_instruments.attrs["NX_class"] = "NXinstrument"
        instruments_dict = pld_fabrication["instruments_used"]
        try:
            nx_instruments.create_dataset("laser_system", data=instruments_dict["laser_system"])
            nx_instruments.create_dataset("deposition_chamber", data=instruments_dict["deposition_chamber"])
            nx_instruments.create_dataset("rheed_system", data=instruments_dict["rheed_system"])
        except TypeError:
            raise

        # RHEED data section (optional)
        if rheed_osc is not None:
            nx_rheed = nx_pld_entry.create_group("rheed_data")
            nx_rheed.attrs["NX_class"] = "NXdata"

            # Time axis
            t_ds = nx_rheed.create_dataset("time", data=rheed_osc["time"])
            t_ds.attrs["units"] = "s"
            t_ds.attrs["long_name"] = "Time"

            # Intensity: shape (n_layers, n_timepoints, 3)
            i_ds = nx_rheed.create_dataset("intensity", data=rheed_osc["intensity"])
            i_ds.attrs["units"] = "a.u."
            i_ds.attrs["long_name"] = "RHEED Intensity"

            # NXdata attributes — NeXus 3.x notation: only axis 1 (time) is named
            nx_rheed.attrs["signal"] = "intensity"
            nx_rheed.attrs["axes"] = [".", "time", "."]
            nx_rheed.attrs["time_indices"] = np.array([1], dtype=np.int32)
    return
|
|
||||||
if __name__ == "__main__":
    # Interactive debug entry point: fetch a sample record from eLabFTW,
    # build the NeXus schema dictionary, dump it to JSON, then write the
    # HDF5/NeXus file (with RHEED oscillation data when available).
    print(f"=======================\n===== DEBUG MODE! =====\n=======================\n")
    # TO-DO: place the API base URL somewhere else.
    ELABFTW_API_URL = "https://elabftw.fisica.unina.it/api/v2"
    apikey = getpass("Paste API key here: ")
    elabid = input("Enter elabid of your starting sample [default = 1111]: ") or 1111

    data = APIHandler(apikey).get_entry_from_elabid(elabid)
    sample = Entrypoint(data)
    sample_name = sample.name.strip().replace(" ", "_")
    substrate_object = chain_entrypoint_to_batch(sample)  # Substrate-class object
    layers = chain_entrypoint_to_layers(sample)  # list of Layer-class objects
    n_layers = len(layers)  # total number of layers on the sample

    result = make_nexus_schema_dictionary(substrate_object, layers)
    with open(f"output/sample-{sample_name}.json", "w") as f:
        json.dump(result, f, indent=3)

    # TO-DO: remove the hard-coded path of the RWA file
    # ideally the script should download a TXT/CSV file from each layer
    # (IF PRESENT ←→ also handle missing file error)
    # and merge all data in a single file to analyse it
    with open(f"tests/Realtime_Window_Analysis.txt", "r") as o:
        osc = np.loadtxt(o, delimiter="\t")
    try:
        rheed_osc = analyse_rheed_data(data=osc) or None  # analyze rheed data first, build the file later
    except ValueError as ve:
        # chain the cause so the original failure stays in the traceback
        raise ValueError(f"Error with function analyse_rheed_data. {ve}\nPlease make sure the Realtime Window Analysis file is exactly 4 columns wide - where the first column represents time and the others are RHEED intensities.") from ve

    build_nexus_file(result, output_path=f"output/sample-{sample_name}-nexus.h5", rheed_osc=rheed_osc)
|
|||||||
37931
tests/Realtime_Window_Analysis.txt
Normal file
37931
tests/Realtime_Window_Analysis.txt
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user