diff --git a/docs/source/conf.py b/docs/source/conf.py index 5b41c5ef..b65416c9 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -21,10 +21,9 @@ # -- Project information ----------------------------------------------------- -project = 'RAMP' -copyright = '2022, Author List' -author = 'Author List' - +project = "RAMP" +copyright = "2022, Author List" +author = "Author List" # -- General configuration --------------------------------------------------- @@ -40,14 +39,14 @@ "sphinx.ext.coverage", "sphinx.ext.autosummary", "sphinx.ext.napoleon", - 'sphinx.ext.duration', - 'sphinx.ext.doctest', - 'sphinx_copybutton', - 'sphinx.ext.autosectionlabel' + "sphinx.ext.duration", + "sphinx.ext.doctest", + "sphinx_copybutton", + "sphinx.ext.autosectionlabel", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -61,13 +60,12 @@ # a list of builtin themes. # extensions.append("sphinxjp.themes.basicstrap") -html_theme = 'basicstrap' -#html_theme_path = [sphinx_pdj_theme.get_html_theme_path()] +html_theme = "basicstrap" +# html_theme_path = [sphinx_pdj_theme.get_html_theme_path()] # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # copy btn settings copybutton_prompt_text = "" - diff --git a/ramp/__init__.py b/ramp/__init__.py index 2c9a6ee8..5f2dface 100644 --- a/ramp/__init__.py +++ b/ramp/__init__.py @@ -15,13 +15,13 @@ from ramp._version import __version__ -from ramp.core.core import UseCase,User,Appliance +from ramp.core.core import UseCase, User, Appliance from ramp.core.stochastic_process import calc_peak_time_range from ramp.core.utils import yearly_pattern -from ramp.example.examples import load_data,download_example -from ramp.post_process.post_process import Run,Plot +from ramp.example.examples import load_data, download_example +from ramp.post_process.post_process import Run, Plot __authors__ = "Listed in AUTHORS" -__copyright__ = "Licensed under the European Union Public Licence (EUPL), Version 1.2-or-later" - - +__copyright__ = ( + "Licensed under the European Union Public Licence (EUPL), Version 1.2-or-later" +) diff --git a/ramp/_version.py b/ramp/_version.py index 0bf3b7d6..3d26edf7 100644 --- a/ramp/_version.py +++ b/ramp/_version.py @@ -1,2 +1 @@ - __version__ = "0.4.1" diff --git a/ramp/cli.py b/ramp/cli.py index f5f3b4b1..541eabf0 100644 --- a/ramp/cli.py +++ b/ramp/cli.py @@ -57,11 +57,7 @@ ) parser.add_argument( - "--ext", - dest="extension", - type=str, - help="Format of input files", - default="xlsx" + "--ext", dest="extension", type=str, help="Format of input files", default="xlsx" ) parser.add_argument( @@ -76,7 +72,6 @@ def main(): - args = vars(parser.parse_args()) fnames = args["fname_path"] ofnames = args["ofname_path"] @@ -100,7 +95,9 @@ def main(): if years is not None: if date_start is not None or date_end is not None: - raise ValueError("You cannot use the option -y in combinaison with --date-start and/or --date-end") + raise ValueError( + "You cannot use the option -y in combinaison with --date-start and/or --date-end" + ) else: date_start = datetime.date(years[0], 1, 1) date_end = datetime.date(years[-1], 
12, 31) @@ -109,20 +106,27 @@ def main(): # Triggers the special mode "one input file per month" if os.path.isdir(fnames[0]): dir_path = fnames[0] - fnames = [os.path.join(dir_path, f) for f in os.listdir(fnames[0]) if f.endswith(ext)] - fnames.sort(key=lambda f: int(''.join(filter(str.isdigit, f)))) + fnames = [ + os.path.join(dir_path, f) + for f in os.listdir(fnames[0]) + if f.endswith(ext) + ] + fnames.sort(key=lambda f: int("".join(filter(str.isdigit, f)))) if len(fnames) == 12: - print("The following input files were found and will be used in this exact order for month inputs") + print( + "The following input files were found and will be used in this exact order for month inputs" + ) print("\n".join(fnames)) month_files = True year = years[0] else: - raise ValueError(f"You want to simulate a whole year, yet the folder {dir_path} only contains {len(fnames)} out of the 12 monthes required") + raise ValueError( + f"You want to simulate a whole year, yet the folder {dir_path} only contains {len(fnames)} out of the 12 monthes required" + ) else: print("You selected a single year but the input path is not a folder.") - if date_start is not None and date_end is not None: days = pd.date_range(start=date_start, end=date_end) else: @@ -132,7 +136,9 @@ def main(): ofnames = [None] if fnames is None: - print("Please provide path to input file with option -i, \n\nDefault to old version of RAMP input files\n") + print( + "Please provide path to input file with option -i, \n\nDefault to old version of RAMP input files\n" + ) # Files are specified as numbers in a list (e.g. [1,2] will consider input_file_1.py and input_file_2.py) from ramp.ramp_run import input_files_to_run @@ -142,7 +148,8 @@ def main(): else: if len(num_profiles) != len(input_files_to_run): raise ValueError( - "The number of profiles parameters should match the number of input files provided") + "The number of profiles parameters should match the number of input files provided" + ) else: num_profiles = [None] * len(input_files_to_run) @@ -167,34 +174,49 @@ def main(): else: if len(num_profiles) != len(fnames): raise ValueError( - "The number of profiles parameters should match the number of input files provided") + "The number of profiles parameters should match the number of input files provided" + ) else: num_profiles = [None] * len(fnames) if month_files is True: year_profile = [] for i, fname in enumerate(fnames): - month_start = datetime.date(year, i+1, 1) - month_end = datetime.date(year, i+1, pd.Period(month_start, freq="D").days_in_month) - days = pd.date_range(start=month_start, end=month_end, freq='D') - monthly_profiles = run_usecase(fname=fname, num_profiles=num_profiles[i], days=days, plot=False, parallel=parallel_processing) + month_start = datetime.date(year, i + 1, 1) + month_end = datetime.date( + year, i + 1, pd.Period(month_start, freq="D").days_in_month + ) + days = pd.date_range(start=month_start, end=month_end, freq="D") + monthly_profiles = run_usecase( + fname=fname, + num_profiles=num_profiles[i], + days=days, + plot=False, + parallel=parallel_processing, + ) year_profile.append(np.hstack(monthly_profiles)) # Create a dataFrame to save the year profile with timestamps every minutes series_frame = pd.DataFrame( np.hstack(year_profile), - index=pd.date_range(start=f"{year}-1-1", end=f"{year}-12-31 23:59", freq="T") + index=pd.date_range( + start=f"{year}-1-1", end=f"{year}-12-31 23:59", freq="T" + ), ) # Save to minute and hour resolution # TODO let the user choose where to save the files/file_name, make 
sure the user wants to overwrite the file # if it already exists - series_frame.to_csv(os.path.join(BASE_PATH, 'yearly_profile_min_resolution.csv')) + series_frame.to_csv( + os.path.join(BASE_PATH, "yearly_profile_min_resolution.csv") + ) resampled = pd.DataFrame() resampled["mean"] = series_frame.resample("H").mean() resampled["max"] = series_frame.resample("H").max() resampled["min"] = series_frame.resample("H").min() - #TODO add more columns with other resampled functions (do this in Jupyter) - resampled.to_csv(os.path.join(BASE_PATH, 'yearly_profile_hourly_resolution.csv')) + # TODO add more columns with other resampled functions (do this in Jupyter) + resampled.to_csv( + os.path.join(BASE_PATH, "yearly_profile_hourly_resolution.csv") + ) else: if len(ofnames) == 1: ofnames = ofnames * len(fnames) @@ -214,5 +236,4 @@ def main(): if __name__ == "__main__": - main() diff --git a/ramp/core/constants.py b/ramp/core/constants.py index ea7a856d..d3433aff 100644 --- a/ramp/core/constants.py +++ b/ramp/core/constants.py @@ -64,64 +64,64 @@ def switch_on_parameters(): APPLIANCE_ATTRIBUTES = ( - "name", - "number", - "power", - "num_windows", - "func_time", - "time_fraction_random_variability", - "func_cycle", - "fixed", - "fixed_cycle", - "occasional_use", - "flat", - "thermal_p_var", - "pref_index", - "wd_we_type", - "p_11", - "t_11", - "cw11", - "p_12", - "t_12", - "cw12", - "r_c1", - "p_21", - "t_21", - "cw21", - "p_22", - "t_22", - "cw22", - "r_c2", - "p_31", - "t_31", - "cw31", - "p_32", - "t_32", - "cw32", - "r_c3", - "window_1", - "window_2", - "window_3", - "random_var_w", - ) + "name", + "number", + "power", + "num_windows", + "func_time", + "time_fraction_random_variability", + "func_cycle", + "fixed", + "fixed_cycle", + "occasional_use", + "flat", + "thermal_p_var", + "pref_index", + "wd_we_type", + "p_11", + "t_11", + "cw11", + "p_12", + "t_12", + "cw12", + "r_c1", + "p_21", + "t_21", + "cw21", + "p_22", + "t_22", + "cw22", + "r_c2", + "p_31", + "t_31", + "cw31", + "p_32", + "t_32", + "cw32", + "r_c3", + "window_1", + "window_2", + "window_3", + "random_var_w", +) APPLIANCE_ARGS = ( - "number", - "power", - #"p_series", - "num_windows", - "func_time", - "time_fraction_random_variability", - "func_cycle", - "fixed", - "fixed_cycle", - "occasional_use", - "flat", - "thermal_p_var", - "pref_index", - "wd_we_type", - "name", - ) + "number", + "power", + # "p_series", + "num_windows", + "func_time", + "time_fraction_random_variability", + "func_cycle", + "fixed", + "fixed_cycle", + "occasional_use", + "flat", + "thermal_p_var", + "pref_index", + "wd_we_type", + "name", +) MAX_WINDOWS = 3 WINDOWS_PARAMETERS = ("window_1", "window_2", "window_3", "random_var_w") @@ -129,4 +129,4 @@ def switch_on_parameters(): ("p_11", "t_11", "cw11", "p_12", "t_12", "cw12", "r_c1"), ("p_21", "t_21", "cw21", "p_22", "t_22", "cw22", "r_c2"), ("p_31", "t_31", "cw31", "p_32", "t_32", "cw32", "r_c3"), -) \ No newline at end of file +) diff --git a/ramp/core/core.py b/ramp/core/core.py index a2aac595..2cffb77b 100644 --- a/ramp/core/core.py +++ b/ramp/core/core.py @@ -12,13 +12,27 @@ import warnings import random import math -from ramp.core.constants import NEW_TO_OLD_MAPPING, APPLIANCE_ATTRIBUTES, APPLIANCE_ARGS, WINDOWS_PARAMETERS, DUTY_CYCLE_PARAMETERS, switch_on_parameters -from ramp.core.utils import random_variation, duty_cycle, random_choice, read_input_file, within_peak_time_window +from ramp.core.constants import ( + NEW_TO_OLD_MAPPING, + APPLIANCE_ATTRIBUTES, + APPLIANCE_ARGS, + WINDOWS_PARAMETERS, + 
DUTY_CYCLE_PARAMETERS, + switch_on_parameters, +) +from ramp.core.utils import ( + random_variation, + duty_cycle, + random_choice, + read_input_file, + within_peak_time_window, +) + + +from typing import List, Union, Iterable +from ramp.errors_logs.errors import InvalidType, InvalidWindow -from typing import List, Union,Iterable -from ramp.errors_logs.errors import InvalidType,InvalidWindow - def single_appliance_daily_load_profile(args): app, args = args app.generate_load_profile(*args, power=app.power[args[0]]) @@ -27,8 +41,8 @@ def single_appliance_daily_load_profile(args): class UseCase: - def __init__(self, name:str="", users:Union[List,None]=None): - """ Creates a UseCase instance for gathering a list of User instances which own Appliance instances + def __init__(self, name: str = "", users: Union[List, None] = None): + """Creates a UseCase instance for gathering a list of User instances which own Appliance instances Parameters ---------- @@ -60,7 +74,9 @@ def add_user(self, user) -> None: if isinstance(user, User): self.users.append(user) else: - raise InvalidType(f"{type(user)} is not valid. Only 'User' type is acceptable.") + raise InvalidType( + f"{type(user)} is not valid. Only 'User' type is acceptable." + ) def collect_appliances_from_users(self): appliances = [] @@ -68,7 +84,6 @@ def collect_appliances_from_users(self): appliances = appliances + user.App_list self.appliances = appliances - def generate_daily_load_profiles(self, num_profiles, peak_time_range, day_types): profiles = [] for prof_i in range(num_profiles): @@ -78,15 +93,19 @@ def generate_daily_load_profiles(self, num_profiles, peak_time_range, day_types) # for each User instance generate a load profile, iterating through all user of this instance and # all appliances they own, corresponds to step 2. 
of [1], p.7 for user in self.users: - user.generate_aggregated_load_profile(prof_i, peak_time_range, day_types) + user.generate_aggregated_load_profile( + prof_i, peak_time_range, day_types + ) # aggregate the user load to the usecase load usecase_load = usecase_load + user.load profiles.append(usecase_load) # screen update about progress of computation - #print('Profile', prof_i+1, '/', num_profiles, 'completed') + # print('Profile', prof_i+1, '/', num_profiles, 'completed') return profiles - def generate_daily_load_profiles_parallel(self, num_profiles, peak_time_range, day_types): + def generate_daily_load_profiles_parallel( + self, num_profiles, peak_time_range, day_types + ): max_parallel_processes = multiprocessing.cpu_count() tasks = [] t = 0 @@ -103,12 +122,13 @@ def generate_daily_load_profiles_parallel(self, num_profiles, peak_time_range, d with multiprocessing.Pool(max_parallel_processes) as pool: with tqdm( - total=len(tasks), - desc=f"Computing appliances profiles", - unit="unit", + total=len(tasks), + desc=f"Computing appliances profiles", + unit="unit", ) as pbar: - - imap_unordered_it = pool.imap_unordered(single_appliance_daily_load_profile, tasks, chunksize=4) + imap_unordered_it = pool.imap_unordered( + single_appliance_daily_load_profile, tasks, chunksize=4 + ) for prof_i, daily_load in imap_unordered_it: if prof_i in daily_profiles_dict: daily_profiles_dict[prof_i].append(daily_load) @@ -119,11 +139,13 @@ def generate_daily_load_profiles_parallel(self, num_profiles, peak_time_range, d daily_profiles = np.zeros((num_profiles, 1440)) for day_id in range(num_profiles): - daily_profiles[day_id, :] = np.vstack(daily_profiles_dict[day_id]).sum(axis=0) + daily_profiles[day_id, :] = np.vstack(daily_profiles_dict[day_id]).sum( + axis=0 + ) return daily_profiles - def save(self, filename:str=None) -> Union[pd.DataFrame,None]: + def save(self, filename: str = None) -> Union[pd.DataFrame, None]: """Saves/returns the model databas including all the users and their appliances as a single pd.DataFrame or excel file. Parameters @@ -161,7 +183,7 @@ def export_to_dataframe(self) -> pd.DataFrame: """ return self.save() - def load(self, filename:str) -> None: + def load(self, filename: str) -> None: """Open an .xlsx file which was produced via the save method and create instances of Users and Appliances Parameters @@ -247,11 +269,10 @@ def load(self, filename:str) -> None: self.collect_appliances_from_users() - - - class User: - def __init__(self, user_name:str="", num_users:int=1, user_preference:int=0): + def __init__( + self, user_name: str = "", num_users: int = 1, user_preference: int = 0 + ): """Creates a User instance (User Category) Parameters @@ -268,10 +289,14 @@ def __init__(self, user_name:str="", num_users:int=1, user_preference:int=0): self.user_preference = user_preference self.rand_daily_pref = 0 self.load = None - self.App_list = [] # each instance of User (i.e. each user class) has its own list of Appliances + self.App_list = ( + [] + ) # each instance of User (i.e. 
each user class) has its own list of Appliances def __repr__(self): - return self.save()[["user_name","num_users","name","number","power"]].to_string() + return self.save()[ + ["user_name", "num_users", "name", "number", "power"] + ].to_string() def add_appliance(self, *args, **kwargs): """adds an appliance to the user category with all the appliance characteristics in a single function @@ -302,7 +327,7 @@ def add_appliance(self, *args, **kwargs): if k in kwargs: cycle_parameters[k] = kwargs.pop(k) if cycle_parameters: - duty_cycle_parameters[i+1] = cycle_parameters + duty_cycle_parameters[i + 1] = cycle_parameters app = Appliance(self, **kwargs) @@ -324,11 +349,15 @@ def maximum_profile(self) -> np.array: user_max_profile = np.zeros(1440) for appliance in self.App_list: # Calculate windows curve, i.e. the theoretical maximum curve that can be obtained, for each app, by switching-on always all the 'n' apps altogether in any time-step of the functioning windows - app_max_profile = appliance.maximum_profile # this computes the curve for the specific App - user_max_profile = np.vstack([user_max_profile, app_max_profile]) # this stacks the specific App curve in an overall curve comprising all the Apps within a User class + app_max_profile = ( + appliance.maximum_profile + ) # this computes the curve for the specific App + user_max_profile = np.vstack( + [user_max_profile, app_max_profile] + ) # this stacks the specific App curve in an overall curve comprising all the Apps within a User class return np.transpose(np.sum(user_max_profile, axis=0)) * self.num_users - def save(self, filename:str=None) -> Union[pd.DataFrame,None]: + def save(self, filename: str = None) -> Union[pd.DataFrame, None]: """Saves/returns the model databas including allappliances as a single pd.DataFrame or excel file. 
Parameters @@ -397,7 +426,6 @@ def __eq__(self, other_user): if len(answer) > 0: answer = answer.all() else: - if len(self.App_list) > 0: answer = False else: @@ -429,7 +457,6 @@ def export_to_dataframe(self) -> pd.DataFrame: """ return self.save() - def Appliance( self, number=1, @@ -470,7 +497,9 @@ def Appliance( name=name, ) - def generate_single_load_profile(self, prof_i:int, peak_time_range:np.array, day_type:int): + def generate_single_load_profile( + self, prof_i: int, peak_time_range: np.array, day_type: int + ): """Generates a load profile for a single user taking all its appliances into consideration Parameters @@ -491,17 +520,24 @@ def generate_single_load_profile(self, prof_i:int, peak_time_range:np.array, day """ if prof_i not in range(365): - raise ValueError(f'prof_i should be an integer in range of 0 to 364') + raise ValueError(f"prof_i should be an integer in range of 0 to 364") single_load = np.zeros(1440) - self.rand_daily_pref = 0 if self.user_preference == 0 else random.randint(1, self.user_preference) - - for App in self.App_list: # iterates for all the App types in the given User class + self.rand_daily_pref = ( + 0 if self.user_preference == 0 else random.randint(1, self.user_preference) + ) - App.generate_load_profile(prof_i, peak_time_range, day_type, power=App.power[prof_i]) + for ( + App + ) in self.App_list: # iterates for all the App types in the given User class + App.generate_load_profile( + prof_i, peak_time_range, day_type, power=App.power[prof_i] + ) - single_load = single_load + App.daily_use # adds the Appliance load profile to the single User load profile + single_load = ( + single_load + App.daily_use + ) # adds the Appliance load profile to the single User load profile return single_load def generate_aggregated_load_profile(self, prof_i, peak_time_range, day_type): @@ -529,34 +565,36 @@ def generate_aggregated_load_profile(self, prof_i, peak_time_range, day_type): """ if prof_i not in range(365): - raise ValueError(f'prof_i should be an integer in range of 0 to 364') - + raise ValueError(f"prof_i should be an integer in range of 0 to 364") self.load = np.zeros(1440) # initialise empty load for User instance for _ in range(self.num_users): # iterates for every single user within a User class. 
- self.load = self.load + self.generate_single_load_profile(prof_i, peak_time_range, day_type) + self.load = self.load + self.generate_single_load_profile( + prof_i, peak_time_range, day_type + ) return self.load + class Appliance: def __init__( self, user, - number:int=1, - power:Union[float,pd.DataFrame]=0, - num_windows:int=1, - func_time:int=0, - time_fraction_random_variability:float=0, - func_cycle:int=1, - fixed:str="no", - fixed_cycle:int=0, - occasional_use:float=1, - flat:str="no", - thermal_p_var:int=0, - pref_index:int=0, - wd_we_type:int=2, - name:str="", + number: int = 1, + power: Union[float, pd.DataFrame] = 0, + num_windows: int = 1, + func_time: int = 0, + time_fraction_random_variability: float = 0, + func_cycle: int = 1, + fixed: str = "no", + fixed_cycle: int = 0, + occasional_use: float = 1, + flat: str = "no", + thermal_p_var: int = 0, + pref_index: int = 0, + wd_we_type: int = 2, + name: str = "", ): """Creates an appliance for a given user @@ -620,29 +658,25 @@ def __init__( self.num_windows = num_windows self.func_time = func_time self.time_fraction_random_variability = time_fraction_random_variability - self.func_cycle = ( - func_cycle - ) + self.func_cycle = func_cycle self.fixed = fixed self.fixed_cycle = fixed_cycle self.occasional_use = occasional_use self.flat = flat - self.thermal_p_var = ( - thermal_p_var - ) + self.thermal_p_var = thermal_p_var self.pref_index = pref_index self.wd_we_type = wd_we_type - if isinstance(power,pd.DataFrame): - if power.shape == (365,1): - power = power.values[:,0] + if isinstance(power, pd.DataFrame): + if power.shape == (365, 1): + power = power.values[:, 0] else: raise ValueError("wrong size of array. array size should be (365,1).") - elif isinstance(power,str): - power = pd.read_json(power).values[:,0] + elif isinstance(power, str): + power = pd.read_json(power).values[:, 0] - elif isinstance(power,(float,int)): + elif isinstance(power, (float, int)): # TODO change this automatic value depending on the range of the usecase power = power * np.ones(366) @@ -703,7 +737,6 @@ def save(self) -> pd.DataFrame: for user_attribute in ("user_name", "num_users", "user_preference"): dm[user_attribute] = getattr(self.user, user_attribute) for attribute in APPLIANCE_ATTRIBUTES: - if hasattr(self, attribute): if "window_" in attribute or "cw" in attribute: window_value = getattr(self, attribute) @@ -720,7 +753,7 @@ def save(self) -> pd.DataFrame: dm[attribute] = getattr(self, attribute) else: # this is for legacy purpose, so that people can export their old models to new format - old_attribute = NEW_TO_OLD_MAPPING.get(attribute,attribute) + old_attribute = NEW_TO_OLD_MAPPING.get(attribute, attribute) if hasattr(self, old_attribute): if "window_" in attribute or "cw" in attribute: window_value = getattr(self, old_attribute) @@ -754,13 +787,13 @@ def export_to_dataframe(self) -> pd.DataFrame: return self.save() def __repr__(self): - try: - return self.save()[["user_name","num_users","name","number","power"]].to_string() + return self.save()[ + ["user_name", "num_users", "name", "number", "power"] + ].to_string() except Exception: return "" - def __eq__(self, other_appliance) -> bool: """checks the equality of two appliances @@ -793,7 +826,13 @@ def __eq__(self, other_appliance) -> bool: np.append(answer, False) return answer.all() - def windows(self, window_1:Iterable=None, window_2:Iterable=None,random_var_w:float=0 ,window_3:Iterable=None): + def windows( + self, + window_1: Iterable = None, + window_2: Iterable = None, + random_var_w: 
float = 0, + window_3: Iterable = None, + ): """assings functioning windows to the appliance and adds the appliance to the user class Parameters @@ -835,20 +874,28 @@ def windows(self, window_1:Iterable=None, window_2:Iterable=None,random_var_w:fl """ if window_1 is None: - warnings.warn(UserWarning("No windows is declared, default window of 24 hours is selected")) + warnings.warn( + UserWarning( + "No windows is declared, default window of 24 hours is selected" + ) + ) self.window_1 = np.array([0, 1440]) else: self.window_1 = window_1 if window_2 is None: if self.num_windows >= 2: - raise InvalidWindow("Windows 2 is not provided although 2 windows were declared") + raise InvalidWindow( + "Windows 2 is not provided although 2 windows were declared" + ) else: self.window_2 = window_2 if window_3 is None: if self.num_windows == 3: - raise InvalidWindow("Windows 3 is not provided although 3 windows were declared") + raise InvalidWindow( + "Windows 3 is not provided although 3 windows were declared" + ) else: self.window_3 = window_3 @@ -857,18 +904,30 @@ def windows(self, window_1:Iterable=None, window_2:Iterable=None,random_var_w:fl for i in range(1, self.num_windows + 1, 1): window_time = window_time + np.diff(getattr(self, f"window_{i}"))[0] if window_time < self.func_time: - raise InvalidWindow(f"The sum of all windows time intervals for the appliance '{self.name}' of user '{self.user.user_name}' is smaller than the time the appliance is supposed to be on ({window_time} < {self.func_time}). Please check your input file for typos.") + raise InvalidWindow( + f"The sum of all windows time intervals for the appliance '{self.name}' of user '{self.user.user_name}' is smaller than the time the appliance is supposed to be on ({window_time} < {self.func_time}). Please check your input file for typos." + ) self.random_var_w = random_var_w - self.daily_use = np.zeros(1440) #create an empty daily use profile - self.daily_use[self.window_1[0]:(self.window_1[1])] = np.full(np.diff(self.window_1),0.001) #fills the daily use profile with infinitesimal values that are just used to identify the functioning windows - self.daily_use[self.window_2[0]:(self.window_2[1])] = np.full(np.diff(self.window_2),0.001) #same as above for window2 - self.daily_use[self.window_3[0]:(self.window_3[1])] = np.full(np.diff(self.window_3),0.001) #same as above for window3 - - self.random_var_1 = int(random_var_w*np.diff(self.window_1)) #calculate the random variability of window1, i.e. the maximum range of time they can be enlarged or shortened - self.random_var_2 = int(random_var_w*np.diff(self.window_2)) #same as above - self.random_var_3 = int(random_var_w*np.diff(self.window_3)) #same as above - self.user.App_list.append(self) #automatically appends the appliance to the user's appliance list + self.daily_use = np.zeros(1440) # create an empty daily use profile + self.daily_use[self.window_1[0] : (self.window_1[1])] = np.full( + np.diff(self.window_1), 0.001 + ) # fills the daily use profile with infinitesimal values that are just used to identify the functioning windows + self.daily_use[self.window_2[0] : (self.window_2[1])] = np.full( + np.diff(self.window_2), 0.001 + ) # same as above for window2 + self.daily_use[self.window_3[0] : (self.window_3[1])] = np.full( + np.diff(self.window_3), 0.001 + ) # same as above for window3 + + self.random_var_1 = int( + random_var_w * np.diff(self.window_1) + ) # calculate the random variability of window1, i.e. 
the maximum range of time they can be enlarged or shortened + self.random_var_2 = int(random_var_w * np.diff(self.window_2)) # same as above + self.random_var_3 = int(random_var_w * np.diff(self.window_3)) # same as above + self.user.App_list.append( + self + ) # automatically appends the appliance to the user's appliance list if self.fixed_cycle == 1: self.cw11 = self.window_1 @@ -879,24 +938,46 @@ def assign_random_cycles(self): Calculates randomised cycles taking the random variability in the duty cycle duration """ if self.fixed_cycle >= 1: - p_11 = random_variation(var=self.thermal_p_var, norm=self.p_11) #randomly variates the power of thermal apps, otherwise variability is 0 - p_12 = random_variation(var=self.thermal_p_var, norm=self.p_12) #randomly variates the power of thermal apps, otherwise variability is 0 - self.random_cycle1 = duty_cycle(var=self.r_c1, t1=self.t_11, p1=p_11, t2=self.t_12, p2=p_12) #randomise also the fixed cycle + p_11 = random_variation( + var=self.thermal_p_var, norm=self.p_11 + ) # randomly variates the power of thermal apps, otherwise variability is 0 + p_12 = random_variation( + var=self.thermal_p_var, norm=self.p_12 + ) # randomly variates the power of thermal apps, otherwise variability is 0 + self.random_cycle1 = duty_cycle( + var=self.r_c1, t1=self.t_11, p1=p_11, t2=self.t_12, p2=p_12 + ) # randomise also the fixed cycle self.random_cycle2 = self.random_cycle1 self.random_cycle3 = self.random_cycle1 if self.fixed_cycle >= 2: - p_21 = random_variation(var=self.thermal_p_var, norm=self.p_21) #randomly variates the power of thermal apps, otherwise variability is 0 - p_22 = random_variation(var=self.thermal_p_var, norm=self.p_22) #randomly variates the power of thermal apps, otherwise variability is 0 - self.random_cycle2 = duty_cycle(var=self.r_c2, t1=self.t_21, p1=p_21, t2=self.t_22, p2=p_22) #randomise also the fixed cycle + p_21 = random_variation( + var=self.thermal_p_var, norm=self.p_21 + ) # randomly variates the power of thermal apps, otherwise variability is 0 + p_22 = random_variation( + var=self.thermal_p_var, norm=self.p_22 + ) # randomly variates the power of thermal apps, otherwise variability is 0 + self.random_cycle2 = duty_cycle( + var=self.r_c2, t1=self.t_21, p1=p_21, t2=self.t_22, p2=p_22 + ) # randomise also the fixed cycle if self.fixed_cycle >= 3: - p_31 = random_variation(var=self.thermal_p_var, norm=self.p_31) #randomly variates the power of thermal apps, otherwise variability is 0 - p_32 = random_variation(var=self.thermal_p_var, norm=self.p_32) #randomly variates the power of thermal apps, otherwise variability is 0 - self.random_cycle1 = random_choice(self.r_c1, t1=self.t_11, p1=p_11, t2=self.t_12, p2=p_12) + p_31 = random_variation( + var=self.thermal_p_var, norm=self.p_31 + ) # randomly variates the power of thermal apps, otherwise variability is 0 + p_32 = random_variation( + var=self.thermal_p_var, norm=self.p_32 + ) # randomly variates the power of thermal apps, otherwise variability is 0 + self.random_cycle1 = random_choice( + self.r_c1, t1=self.t_11, p1=p_11, t2=self.t_12, p2=p_12 + ) - self.random_cycle2 = random_choice(self.r_c2, t1=self.t_21, p1=p_21, t2=self.t_22, p2=p_22) + self.random_cycle2 = random_choice( + self.r_c2, t1=self.t_21, p1=p_21, t2=self.t_22, p2=p_22 + ) - self.random_cycle3 = random_choice(self.r_c3, t1=self.t_31, p1=p_31, t2=self.t_32, p2=p_32) + self.random_cycle3 = random_choice( + self.r_c3, t1=self.t_31, p1=p_31, t2=self.t_32, p2=p_32 + ) def update_available_time_for_switch_on_events(self, 
indexes): """Remove the given time indexes from the ranges available to switch appliance on @@ -923,10 +1004,14 @@ def update_available_time_for_switch_on_events(self, indexes): pass # nothing to do as the whole range should be removed, which is already the case from line above elif indexes[0] == spot_to_split.start: # reinsert a range going from end of indexes up to the end of picked range - self.free_spots.insert(spot_idx, slice(indexes[-1] + 1, spot_to_split.stop, None)) + self.free_spots.insert( + spot_idx, slice(indexes[-1] + 1, spot_to_split.stop, None) + ) elif indexes[-1] == spot_to_split.stop: # reinsert a range going from beginning of picked range up to the beginning of indexes - self.free_spots.insert(spot_idx, slice(spot_to_split.start, indexes[0], None)) + self.free_spots.insert( + spot_idx, slice(spot_to_split.start, indexes[0], None) + ) else: # split the range into 2 smaller ranges new_spot1 = slice(spot_to_split.start, indexes[0], None) @@ -946,26 +1031,38 @@ def update_daily_use(self, coincidence, power, indexes): """ - if self.fixed_cycle > 0: # evaluates if the app has some duty cycles to be considered + if ( + self.fixed_cycle > 0 + ): # evaluates if the app has some duty cycles to be considered evaluate = np.round(np.mean(indexes)) if indexes.size > 0 else 0 # selects the proper duty cycle and puts the corresponding power values in the indexes range - if evaluate in range(self.cw11[0], self.cw11[1]) or evaluate in range(self.cw12[0], self.cw12[1]): + if evaluate in range(self.cw11[0], self.cw11[1]) or evaluate in range( + self.cw12[0], self.cw12[1] + ): np.put(self.daily_use, indexes, (self.random_cycle1 * coincidence)) - elif evaluate in range(self.cw21[0], self.cw21[1]) or evaluate in range(self.cw22[0], self.cw22[1]): + elif evaluate in range(self.cw21[0], self.cw21[1]) or evaluate in range( + self.cw22[0], self.cw22[1] + ): np.put(self.daily_use, indexes, (self.random_cycle2 * coincidence)) else: np.put(self.daily_use, indexes, (self.random_cycle3 * coincidence)) else: # if no duty cycles are specified, a regular switch_on event is modelled # randomises also the App Power if thermal_p_var is on - np.put(self.daily_use, indexes, (random_variation(var=self.thermal_p_var, norm=coincidence * power))) + np.put( + self.daily_use, + indexes, + (random_variation(var=self.thermal_p_var, norm=coincidence * power)), + ) # updates the time ranges remaining for switch on events, excluding the current switch_on event self.update_available_time_for_switch_on_events(indexes) def calc_rand_window(self, window_idx=1, window_range_limits=[0, 1440]): - _window = self.__getattribute__(f'window_{window_idx}') - _random_var = self.__getattribute__(f'random_var_{window_idx}') - rand_window = [random.randint(_window[0] - _random_var, _window[0] + _random_var), - random.randint(_window[1] - _random_var, _window[1] + _random_var)] + _window = self.__getattribute__(f"window_{window_idx}") + _random_var = self.__getattribute__(f"random_var_{window_idx}") + rand_window = [ + random.randint(_window[0] - _random_var, _window[0] + _random_var), + random.randint(_window[1] - _random_var, _window[1] + _random_var), + ] if rand_window[0] < window_range_limits[0]: rand_window[0] = window_range_limits[0] if rand_window[1] > window_range_limits[1]: @@ -1010,7 +1107,9 @@ def specific_cycle(self, cycle_num, **kwargs): elif cycle_num == 3: self.specific_cycle_3(**kwargs) - def specific_cycle_1(self, p_11 = 0, t_11 = 0, p_12 = 0, t_12 = 0, r_c1 = 0, cw11=None, cw12=None): + def specific_cycle_1( + self, 
p_11=0, t_11=0, p_12=0, t_12=0, r_c1=0, cw11=None, cw12=None + ): """assigining the frist specific duty cycle for the appliace (maximum of three cycles can be assigned) Parameters @@ -1046,10 +1145,13 @@ def specific_cycle_1(self, p_11 = 0, t_11 = 0, p_12 = 0, t_12 = 0, r_c1 = 0, cw1 if cw12 is not None: self.cw12 = cw12 # Below is not used - self.fixed_cycle1 = np.concatenate(((np.ones(self.t_11)*p_11),(np.ones(self.t_12)*p_12))) #create numpy array representing the duty cycle - - def specific_cycle_2(self, p_21 = 0, t_21 = 0, p_22 = 0, t_22 = 0, r_c2 = 0, cw21=None, cw22=None): + self.fixed_cycle1 = np.concatenate( + ((np.ones(self.t_11) * p_11), (np.ones(self.t_12) * p_12)) + ) # create numpy array representing the duty cycle + def specific_cycle_2( + self, p_21=0, t_21=0, p_22=0, t_22=0, r_c2=0, cw21=None, cw22=None + ): """assigining the frist specific duty cycle for the appliace (maximum of three cycles can be assigned) Parameters @@ -1085,9 +1187,13 @@ def specific_cycle_2(self, p_21 = 0, t_21 = 0, p_22 = 0, t_22 = 0, r_c2 = 0, cw2 if cw22 is not None: self.cw22 = cw22 # Below is not used - self.fixed_cycle2 = np.concatenate(((np.ones(self.t_21)*p_21),(np.ones(self.t_22)*p_22))) + self.fixed_cycle2 = np.concatenate( + ((np.ones(self.t_21) * p_21), (np.ones(self.t_22) * p_22)) + ) - def specific_cycle_3(self, p_31 = 0, t_31 = 0, p_32 = 0, t_32 = 0, r_c3 = 0, cw31=None, cw32=None): + def specific_cycle_3( + self, p_31=0, t_31=0, p_32=0, t_32=0, r_c3=0, cw31=None, cw32=None + ): """assigining the frist specific duty cycle for the appliace (maximum of three cycles can be assigned) Parameters @@ -1123,10 +1229,20 @@ def specific_cycle_3(self, p_31 = 0, t_31 = 0, p_32 = 0, t_32 = 0, r_c3 = 0, cw3 if cw32 is not None: self.cw32 = cw32 # Below is not used - self.fixed_cycle3 = np.concatenate(((np.ones(self.t_31)*p_31),(np.ones(self.t_32)*p_32))) + self.fixed_cycle3 = np.concatenate( + ((np.ones(self.t_31) * p_31), (np.ones(self.t_32) * p_32)) + ) - #different time windows can be associated with different specific duty cycles - def cycle_behaviour(self, cw11 = np.array([0,0]), cw12 = np.array([0,0]), cw21 = np.array([0,0]), cw22 = np.array([0,0]), cw31 = np.array([0,0]), cw32 = np.array([0,0])): + # different time windows can be associated with different specific duty cycles + def cycle_behaviour( + self, + cw11=np.array([0, 0]), + cw12=np.array([0, 0]), + cw21=np.array([0, 0]), + cw22=np.array([0, 0]), + cw31=np.array([0, 0]), + cw32=np.array([0, 0]), + ): """_summary_ Parameters @@ -1145,11 +1261,11 @@ def cycle_behaviour(self, cw11 = np.array([0,0]), cw12 = np.array([0,0]), cw21 = Window time range for the second part of third duty cycle number, by default np.array([0,0]) """ # only used around line 223 - self.cw11 = cw11 #first window associated with cycle1 - self.cw12 = cw12 #second window associated with cycle1 - self.cw21 = cw21 #same for cycle2 + self.cw11 = cw11 # first window associated with cycle1 + self.cw12 = cw12 # second window associated with cycle1 + self.cw21 = cw21 # same for cycle2 self.cw22 = cw22 - self.cw31 = cw31 #same for cycle 3 + self.cw31 = cw31 # same for cycle 3 self.cw32 = cw32 def rand_total_time_of_use( @@ -1157,9 +1273,8 @@ def rand_total_time_of_use( rand_window_1: Iterable[int], rand_window_2: Iterable[int], rand_window_3: Iterable[int], - ) -> int: - """Randomised total time of use of the Appliance instance - """ + ) -> int: + """Randomised total time of use of the Appliance instance""" random_var_t = 
random_variation(var=self.time_fraction_random_variability) @@ -1182,10 +1297,12 @@ def rand_total_time_of_use( rand_time = int(0.99 * total_time) if rand_time < self.func_cycle: - raise ValueError(f"The func_cycle you choose for appliance {self.name} might be too large to fit in the available time for appliance usage, please either reduce func_cycle or increase the windows of use of the appliance") + raise ValueError( + f"The func_cycle you choose for appliance {self.name} might be too large to fit in the available time for appliance usage, please either reduce func_cycle or increase the windows of use of the appliance" + ) return rand_time - def rand_switch_on_window(self, rand_time:int): + def rand_switch_on_window(self, rand_time: int): """Identifies a random switch on window within the available functioning windows This corresponds to step 2c. of: @@ -1198,23 +1315,30 @@ def rand_switch_on_window(self, rand_time:int): indexes_choice = [] for s in self.free_spots: if s.stop - s.start >= self.func_cycle: - indexes_choice += [*range(s.start, s.stop - self.func_cycle + 1)] # this will be fast with cython + indexes_choice += [ + *range(s.start, s.stop - self.func_cycle + 1) + ] # this will be fast with cython n_choices = len(indexes_choice) if n_choices > 0: # Identifies a random switch on time within the available functioning windows # step 2c of [1] - switch_on = indexes_choice[random.randint(0, n_choices-1)] + switch_on = indexes_choice[random.randint(0, n_choices - 1)] spot_idx = None for i, fs in enumerate(self.free_spots): if fs.start <= switch_on <= fs.stop - self.func_cycle: spot_idx = i break - largest_duration = min(rand_time, self.free_spots[spot_idx].stop - switch_on) + largest_duration = min( + rand_time, self.free_spots[spot_idx].stop - switch_on + ) if largest_duration > self.func_cycle: - indexes = np.arange(switch_on, switch_on + ( - int(random.uniform(self.func_cycle, largest_duration)))) # TODO randint + indexes = np.arange( + switch_on, + switch_on + + (int(random.uniform(self.func_cycle, largest_duration))), + ) # TODO randint elif largest_duration == self.func_cycle: indexes = np.arange(switch_on, switch_on + largest_duration) else: @@ -1222,14 +1346,16 @@ def rand_switch_on_window(self, rand_time:int): print("max window", self.free_spots[spot_idx].stop) print("rand_time", rand_time) print("upper_limit", largest_duration) - raise ValueError("There is something fishy with upper limit in switch on...") + raise ValueError( + "There is something fishy with upper limit in switch on..." + ) else: indexes = None # there are no available windows anymore return indexes - def calc_coincident_switch_on(self, inside_peak_window:bool=True): + def calc_coincident_switch_on(self, inside_peak_window: bool = True): """Computes how many of the 'n' Appliance instance are switched on simultaneously Implement eqs. 3 and 4 of [1] @@ -1241,12 +1367,23 @@ def calc_coincident_switch_on(self, inside_peak_window:bool=True): s_peak, mu_peak, op_factor = switch_on_parameters() # check if indexes are within peak window - if inside_peak_window is True and self.fixed == 'no': + if inside_peak_window is True and self.fixed == "no": # calculates coincident behaviour within the peak time range # eq. 
4 of [1] - coincidence = min(self.number, max(1, math.ceil(random.gauss(mu=(self.number * mu_peak + 0.5), sigma=(s_peak * self.number * mu_peak))))) + coincidence = min( + self.number, + max( + 1, + math.ceil( + random.gauss( + mu=(self.number * mu_peak + 0.5), + sigma=(s_peak * self.number * mu_peak), + ) + ), + ), + ) # check if indexes are off-peak - elif inside_peak_window is False and self.fixed == 'no': + elif inside_peak_window is False and self.fixed == "no": # calculates probability of coincident switch_ons off-peak # eq. 3 of [1] prob = random.uniform(0, (self.number - op_factor) / self.number) @@ -1279,12 +1416,11 @@ def generate_load_profile(self, prof_i, peak_time_range, day_type, power): # skip this appliance in any of the following applies if ( - # evaluates if occasional use happens or not - (random.uniform(0, 1) > self.occasional_use - # evaluates if daily preference coincides with the randomised daily preference number - or (self.pref_index != 0 and self.user.rand_daily_pref != self.pref_index) - # checks if the app is allowed in the given yearly behaviour pattern - or self.wd_we_type not in [day_type, 2]) + random.uniform(0, 1) > self.occasional_use + # evaluates if daily preference coincides with the randomised daily preference number + or (self.pref_index != 0 and self.user.rand_daily_pref != self.pref_index) + # checks if the app is allowed in the given yearly behaviour pattern + or self.wd_we_type not in [day_type, 2] ): return @@ -1297,82 +1433,73 @@ def generate_load_profile(self, prof_i, peak_time_range, day_type, power): # random variability is applied to the total functioning time and to the duration # of the duty cycles provided they have been specified # step 2a of [1] - rand_time = self.rand_total_time_of_use(rand_window_1, rand_window_2, rand_window_3) + rand_time = self.rand_total_time_of_use( + rand_window_1, rand_window_2, rand_window_3 + ) # redefines functioning windows based on the previous randomisation of the boundaries # step 2b of [1] - if self.flat == 'yes': + if self.flat == "yes": # for "flat" appliances the algorithm stops right after filling the newly # created windows without applying any further stochasticity total_power_value = self.power[prof_i] * self.number for rand_window in rand_windows: - self.daily_use[rand_window[0]:rand_window[1]] = np.full(np.diff(rand_window), - total_power_value) - #single_load = single_load + self.daily_use + self.daily_use[rand_window[0] : rand_window[1]] = np.full( + np.diff(rand_window), total_power_value + ) + # single_load = single_load + self.daily_use return else: # "non-flat" appliances a mask is applied on the newly defined windows and # the algorithm goes further on for rand_window in rand_windows: - self.daily_use[rand_window[0]:rand_window[1]] = np.full(np.diff(rand_window), 0.001) + self.daily_use[rand_window[0] : rand_window[1]] = np.full( + np.diff(rand_window), 0.001 + ) # calculates randomised cycles taking the random variability in the duty cycle duration self.assign_random_cycles() # steps 2c-2e repeated until the sum of the durations of all the switch-on events equals rand_time - - - self.free_spots = [slice(rw[0], rw[1], None) for rw in rand_windows if rw[0] != rw[1]] + self.free_spots = [ + slice(rw[0], rw[1], None) for rw in rand_windows if rw[0] != rw[1] + ] tot_time = 0 while tot_time <= rand_time: - - - # one option could be to generate a lot of them at once indexes = self.rand_switch_on_window( - rand_time=rand_time, #TODO maybe only consider rand_time-tot_time ... 
+ rand_time=rand_time, # TODO maybe only consider rand_time-tot_time ... ) if indexes is None: - break # exit cycle and go to next Appliance as there are no available windows anymore - + break # exit cycle and go to next Appliance as there are no available windows anymore # the count of total time is updated with the size of the indexes array tot_time = tot_time + indexes.size if tot_time > rand_time: # the total functioning time is reached, a correction is applied to avoid overflow of indexes - indexes_adj = indexes[:-(tot_time - rand_time)] + indexes_adj = indexes[: -(tot_time - rand_time)] if len(indexes_adj) > 0: - inside_peak_window = within_peak_time_window(indexes_adj[0], indexes_adj[-1], peak_time_range[0], peak_time_range[-1]) + inside_peak_window = within_peak_time_window( + indexes_adj[0], + indexes_adj[-1], + peak_time_range[0], + peak_time_range[-1], + ) # Computes how many of the 'n' of the Appliance instance are switched on simultaneously - coincidence = self.calc_coincident_switch_on( - inside_peak_window - ) + coincidence = self.calc_coincident_switch_on(inside_peak_window) # Update the daily use depending on existence of duty cycles of the Appliance instance - self.update_daily_use( - coincidence, - power=power, - indexes=indexes_adj - ) + self.update_daily_use(coincidence, power=power, indexes=indexes_adj) break # exit cycle and go to next Appliance else: - inside_peak_window = within_peak_time_window(indexes[0], indexes[-1], peak_time_range[0], peak_time_range[-1]) - - - - coincidence = self.calc_coincident_switch_on( - inside_peak_window + inside_peak_window = within_peak_time_window( + indexes[0], indexes[-1], peak_time_range[0], peak_time_range[-1] ) - # Update the daily use depending on existence of duty cycles of the Appliance instance - self.update_daily_use( - coincidence, - power=power, - indexes=indexes - ) - - + coincidence = self.calc_coincident_switch_on(inside_peak_window) + # Update the daily use depending on existence of duty cycles of the Appliance instance + self.update_daily_use(coincidence, power=power, indexes=indexes) diff --git a/ramp/core/initialise.py b/ramp/core/initialise.py index 8cd1d673..4a9d200e 100644 --- a/ramp/core/initialise.py +++ b/ramp/core/initialise.py @@ -1,13 +1,12 @@ # -*- coding: utf-8 -*- -#%% Initialisation of a model instance +# %% Initialisation of a model instance import numpy as np import importlib from ramp.core.core import UseCase - def user_defined_inputs(j=None, fname=None): """Imports an input file and returns a processed user_list diff --git a/ramp/core/stochastic_process.py b/ramp/core/stochastic_process.py index 2a0e295e..c9afddbd 100644 --- a/ramp/core/stochastic_process.py +++ b/ramp/core/stochastic_process.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- -#%% Import required libraries +# %% Import required libraries import numpy as np -import random +import random import math from ramp.core.initialise import initialise_inputs from ramp.core.core import UseCase -#%% Core model stochastic script +# %% Core model stochastic script def calc_peak_time_range(user_list, peak_enlarge=0.15): @@ -44,16 +44,24 @@ def calc_peak_time_range(user_list, peak_enlarge=0.15): # Find the peak window within the theoretical max profile peak_window = np.squeeze(np.argwhere(tot_max_profile == np.amax(tot_max_profile))) # Within the peak_window, randomly calculate the peak_time using a gaussian distribution - peak_time = round(random.normalvariate( - mu=round(np.average(peak_window)), - sigma=1 / 3 * (peak_window[-1] - peak_window[0]) - )) 
- rand_peak_enlarge = round(math.fabs(peak_time - random.gauss(mu=peak_time, sigma=peak_enlarge * peak_time))) + peak_time = round( + random.normalvariate( + mu=round(np.average(peak_window)), + sigma=1 / 3 * (peak_window[-1] - peak_window[0]), + ) + ) + rand_peak_enlarge = round( + math.fabs( + peak_time - random.gauss(mu=peak_time, sigma=peak_enlarge * peak_time) + ) + ) # The peak_time is randomly enlarged based on the calibration parameter peak_enlarge return np.arange(peak_time - rand_peak_enlarge, peak_time + rand_peak_enlarge) -def stochastic_process(j=None, fname=None, num_profiles=None, day_type=None, parallel=False): +def stochastic_process( + j=None, fname=None, num_profiles=None, day_type=None, parallel=False +): """Generate num_profiles load profile for the usecase Covers steps 1. and 2. of the algorithm described in [1], p.6-7 @@ -77,8 +85,12 @@ def stochastic_process(j=None, fname=None, num_profiles=None, day_type=None, par uc = UseCase(users=user_list) if parallel is True: - profiles = uc.generate_daily_load_profiles_parallel(num_profiles, peak_time_range, day_type) + profiles = uc.generate_daily_load_profiles_parallel( + num_profiles, peak_time_range, day_type + ) else: - profiles = uc.generate_daily_load_profiles(num_profiles, peak_time_range, day_type) + profiles = uc.generate_daily_load_profiles( + num_profiles, peak_time_range, day_type + ) return profiles diff --git a/ramp/core/utils.py b/ramp/core/utils.py index 34ef37e7..f0bfd867 100644 --- a/ramp/core/utils.py +++ b/ramp/core/utils.py @@ -166,6 +166,7 @@ def random_choice(var, t1, p1, t2, p2): ] ) + def get_day_type(day): """Given a datetime object return 0 for weekdays or 1 for weekends""" if day.weekday() > 4: @@ -188,7 +189,11 @@ def yearly_pattern(year=None): year_behaviour = year_behaviour.tolist() else: # a list with 0 for weekdays and 1 for weekends - year_behaviour = pd.date_range(start=f"{year}-01-01", end=f"{year}-12-31", freq="D").map(get_day_type).to_list() + year_behaviour = ( + pd.date_range(start=f"{year}-01-01", end=f"{year}-12-31", freq="D") + .map(get_day_type) + .to_list() + ) return year_behaviour @@ -205,11 +210,18 @@ def within_peak_time_window(win_start, win_stop, peak_win_start, peak_win_stop): def calc_time_taken(func): - """ Calculates the time elapsed during the execution of a function""" + """Calculates the time elapsed during the execution of a function""" + def wrapper(*args, **kwargs): start = time.time() result = func(*args, **kwargs) end = time.time() - print(func.__name__ + ' required ' + str((end-start)*1) + ' seconds for execution. ') + print( + func.__name__ + + " required " + + str((end - start) * 1) + + " seconds for execution. 
" + ) return result + return wrapper diff --git a/ramp/errors_logs/errors.py b/ramp/errors_logs/errors.py index 07ca2bc4..d1d381e0 100644 --- a/ramp/errors_logs/errors.py +++ b/ramp/errors_logs/errors.py @@ -1,10 +1,10 @@ class InvalidType(TypeError): - """Raises when an invalid data type is passed - """ + """Raises when an invalid data type is passed""" + pass + class InvalidWindow(ValueError): - """Raises when an invalid functioning window is passed - """ - pass + """Raises when an invalid functioning window is passed""" + pass diff --git a/ramp/example/examples.py b/ramp/example/examples.py index 7ec2e0af..37539dbf 100644 --- a/ramp/example/examples.py +++ b/ramp/example/examples.py @@ -1,4 +1,3 @@ - import os import pandas as pd import shutil @@ -9,27 +8,31 @@ ) ) -available_data = {"shower":"shower_P.csv"} +available_data = {"shower": "shower_P.csv"} -def load_data(example:str) -> pd.DataFrame: +def load_data(example: str) -> pd.DataFrame: if example not in available_data: raise ValueError(f"valid examples are {[*available_data]}") - - return pd.read_csv( - f"{path}/{available_data[example]}" - ) + return pd.read_csv(f"{path}/{available_data[example]}") -def download_example(destination:str): - """ Copies the model files from the ramp package to a given path +def download_example(destination: str): + """Copies the model files from the ramp package to a given path Parameters ----------- destination : str The path to copy the model files. """ - files = ["input_file_1.py","input_file_2.py","input_file_3.py","shower_P.csv","daily_T.csv","T_gw.csv"] + files = [ + "input_file_1.py", + "input_file_2.py", + "input_file_3.py", + "shower_P.csv", + "daily_T.csv", + "T_gw.csv", + ] for file in files: - shutil.copyfile(src=f"{path}/{file}", dst=f"{destination}/{file}") \ No newline at end of file + shutil.copyfile(src=f"{path}/{file}", dst=f"{destination}/{file}") diff --git a/ramp/example/input_file_1.py b/ramp/example/input_file_1.py index f84e8806..fae61218 100644 --- a/ramp/example/input_file_1.py +++ b/ramp/example/input_file_1.py @@ -1,244 +1,256 @@ # -*- coding: utf-8 -*- -#%% Definition of the inputs -''' +# %% Definition of the inputs +""" Input data definition -''' +""" from ramp.core.core import User + User_list = [] -''' +""" This example input file represents an whole village-scale community, adapted from the data used for the Journal publication. It should provide a complete guidance to most of the possibilities ensured by RAMP for inputs definition, including specific modular duty cycles and cooking cycles. For examples related to "thermal loads", see the "input_file_2". 
-''' +""" -#Create new user classes -HI = User("high income",11,3) +# Create new user classes +HI = User("high income", 11, 3) User_list.append(HI) -HMI = User("higher middle income",38,3) +HMI = User("higher middle income", 38, 3) User_list.append(HMI) -LMI = User("lower middle income",34,3) +LMI = User("lower middle income", 34, 3) User_list.append(LMI) -LI = User("low income",45,3) +LI = User("low income", 45, 3) User_list.append(LI) -Hospital = User("hospital",1) +Hospital = User("hospital", 1) User_list.append(Hospital) -School = User("school",1) +School = User("school", 1) User_list.append(School) -Public_lighting = User("public lighting",1) +Public_lighting = User("public lighting", 1) User_list.append(Public_lighting) -Church = User("church",3) +Church = User("church", 3) User_list.append(Church) -#Create new appliances - -#Church -Ch_indoor_bulb = Church.Appliance(10,26,1,210,0.2,60,'yes', flat = 'yes') -Ch_indoor_bulb.windows([1200,1440],[0,0],0.1) - -Ch_outdoor_bulb = Church.Appliance(7,26,1,150,0.2,60, 'yes', flat = 'yes') -Ch_outdoor_bulb.windows([1200,1440],[0,0],0.1) - -Ch_speaker = Church.Appliance(1,100,1,150,0.2,60) -Ch_speaker.windows([1200,1350],[0,0],0.1) - -#Public lighting -Pub_lights = Public_lighting.Appliance(12,40,2,310,0.1,300, 'yes', flat = 'yes') -Pub_lights.windows([0,336],[1110,1440],0.2) - -Pub_lights_2 = Public_lighting.Appliance(25,150,2,310,0.1,300, 'yes', flat = 'yes') -Pub_lights_2.windows([0,336],[1110,1440],0.2) - - -#High-Income -HI_indoor_bulb = HI.Appliance(6,7,2,120,0.2,10) -HI_indoor_bulb.windows([1170,1440],[0,30],0.35) +# Create new appliances -HI_outdoor_bulb = HI.Appliance(2,13,2,600,0.2,10) -HI_outdoor_bulb.windows([0,330],[1170,1440],0.35) +# Church +Ch_indoor_bulb = Church.Appliance(10, 26, 1, 210, 0.2, 60, "yes", flat="yes") +Ch_indoor_bulb.windows([1200, 1440], [0, 0], 0.1) -HI_TV = HI.Appliance(2,60,3,180,0.1,5) -HI_TV.windows([720,900],[1170,1440],0.35,[0,60]) +Ch_outdoor_bulb = Church.Appliance(7, 26, 1, 150, 0.2, 60, "yes", flat="yes") +Ch_outdoor_bulb.windows([1200, 1440], [0, 0], 0.1) -HI_DVD = HI.Appliance(1,8,3,60,0.1,5) -HI_DVD.windows([720,900],[1170,1440],0.35,[0,60]) +Ch_speaker = Church.Appliance(1, 100, 1, 150, 0.2, 60) +Ch_speaker.windows([1200, 1350], [0, 0], 0.1) -HI_Antenna = HI.Appliance(1,8,3,120,0.1,5) -HI_Antenna.windows([720,900],[1170,1440],0.35,[0,60]) +# Public lighting +Pub_lights = Public_lighting.Appliance(12, 40, 2, 310, 0.1, 300, "yes", flat="yes") +Pub_lights.windows([0, 336], [1110, 1440], 0.2) -HI_Phone_charger = HI.Appliance(5,2,2,300,0.2,5) -HI_Phone_charger.windows([1110,1440],[0,30],0.35) +Pub_lights_2 = Public_lighting.Appliance(25, 150, 2, 310, 0.1, 300, "yes", flat="yes") +Pub_lights_2.windows([0, 336], [1110, 1440], 0.2) -HI_Freezer = HI.Appliance(1,200,1,1440,0,30,'yes',3) -HI_Freezer.windows([0,1440],[0,0]) -HI_Freezer.specific_cycle_1(200,20,5,10) -HI_Freezer.specific_cycle_2(200,15,5,15) -HI_Freezer.specific_cycle_3(200,10,5,20) -HI_Freezer.cycle_behaviour([480,1200],[0,0],[300,479],[0,0],[0,299],[1201,1440]) -HI_Freezer2 = HI.Appliance(1,200,1,1440,0,30,'yes',3) -HI_Freezer2.windows([0,1440],[0,0]) -HI_Freezer2.specific_cycle_1(200,20,5,10) -HI_Freezer2.specific_cycle_2(200,15,5,15) -HI_Freezer2.specific_cycle_3(200,10,5,20) -HI_Freezer2.cycle_behaviour([480,1200],[0,0],[300,479],[0,0],[0,299],[1201,1440]) +# High-Income +HI_indoor_bulb = HI.Appliance(6, 7, 2, 120, 0.2, 10) +HI_indoor_bulb.windows([1170, 1440], [0, 30], 0.35) -HI_Mixer = HI.Appliance(1,50,3,30,0.1,1,occasional_use = 0.33) 
-HI_Mixer.windows([420,480],[660,750],0.35,[1140,1200]) +HI_outdoor_bulb = HI.Appliance(2, 13, 2, 600, 0.2, 10) +HI_outdoor_bulb.windows([0, 330], [1170, 1440], 0.35) -#Higher-Middle Income -HMI_indoor_bulb = HMI.Appliance(5,7,2,120,0.2,10) -HMI_indoor_bulb.windows([1170,1440],[0,30],0.35) +HI_TV = HI.Appliance(2, 60, 3, 180, 0.1, 5) +HI_TV.windows([720, 900], [1170, 1440], 0.35, [0, 60]) -HMI_outdoor_bulb = HMI.Appliance(2,13,2,600,0.2,10) -HMI_outdoor_bulb.windows([0,330],[1170,1440],0.35) +HI_DVD = HI.Appliance(1, 8, 3, 60, 0.1, 5) +HI_DVD.windows([720, 900], [1170, 1440], 0.35, [0, 60]) -HMI_TV = HMI.Appliance(1,60,2,120,0.1,5) -HMI_TV.windows([1170,1440],[0,60],0.35) +HI_Antenna = HI.Appliance(1, 8, 3, 120, 0.1, 5) +HI_Antenna.windows([720, 900], [1170, 1440], 0.35, [0, 60]) -HMI_DVD = HMI.Appliance(1,8,2,40,0.1,5) -HMI_DVD.windows([1170,1440],[0,60],0.35) +HI_Phone_charger = HI.Appliance(5, 2, 2, 300, 0.2, 5) +HI_Phone_charger.windows([1110, 1440], [0, 30], 0.35) -HMI_Antenna = HMI.Appliance(1,8,2,80,0.1,5) -HMI_Antenna.windows([1170,1440],[0,60],0.35) +HI_Freezer = HI.Appliance(1, 200, 1, 1440, 0, 30, "yes", 3) +HI_Freezer.windows([0, 1440], [0, 0]) +HI_Freezer.specific_cycle_1(200, 20, 5, 10) +HI_Freezer.specific_cycle_2(200, 15, 5, 15) +HI_Freezer.specific_cycle_3(200, 10, 5, 20) +HI_Freezer.cycle_behaviour( + [480, 1200], [0, 0], [300, 479], [0, 0], [0, 299], [1201, 1440] +) -HMI_Radio = HMI.Appliance(1,36,2,60,0.1,5) -HMI_Radio.windows([390,450],[1140,1260],0.35) +HI_Freezer2 = HI.Appliance(1, 200, 1, 1440, 0, 30, "yes", 3) +HI_Freezer2.windows([0, 1440], [0, 0]) +HI_Freezer2.specific_cycle_1(200, 20, 5, 10) +HI_Freezer2.specific_cycle_2(200, 15, 5, 15) +HI_Freezer2.specific_cycle_3(200, 10, 5, 20) +HI_Freezer2.cycle_behaviour( + [480, 1200], [0, 0], [300, 479], [0, 0], [0, 299], [1201, 1440] +) -HMI_Phone_charger = HMI.Appliance(4,2,2,300,0.2,5) -HMI_Phone_charger.windows([1110,1440],[0,30],0.35) +HI_Mixer = HI.Appliance(1, 50, 3, 30, 0.1, 1, occasional_use=0.33) +HI_Mixer.windows([420, 480], [660, 750], 0.35, [1140, 1200]) -HMI_Freezer = HMI.Appliance(1,200,1,1440,0,30, 'yes',3) -HMI_Freezer.windows([0,1440],[0,0]) -HMI_Freezer.specific_cycle_1(200,20,5,10) -HMI_Freezer.specific_cycle_2(200,15,5,15) -HMI_Freezer.specific_cycle_3(200,10,5,20) -HMI_Freezer.cycle_behaviour([480,1200],[0,0],[300,479],[0,0],[0,299],[1201,1440]) +# Higher-Middle Income +HMI_indoor_bulb = HMI.Appliance(5, 7, 2, 120, 0.2, 10) +HMI_indoor_bulb.windows([1170, 1440], [0, 30], 0.35) -HMI_Mixer = HMI.Appliance(1,50,3,30,0.1,1, occasional_use = 0.33) -HMI_Mixer.windows([420,450],[660,750],0.35,[1020,1170]) +HMI_outdoor_bulb = HMI.Appliance(2, 13, 2, 600, 0.2, 10) +HMI_outdoor_bulb.windows([0, 330], [1170, 1440], 0.35) -#Lower-Midlle Income -LMI_indoor_bulb = LMI.Appliance(3,7,2,120,0.2,10) -LMI_indoor_bulb.windows([1170,1440],[0,30],0.35) +HMI_TV = HMI.Appliance(1, 60, 2, 120, 0.1, 5) +HMI_TV.windows([1170, 1440], [0, 60], 0.35) -LMI_outdoor_bulb = LMI.Appliance(2,13,2,600,0.2,10) -LMI_outdoor_bulb.windows([0,330],[1170,1440],0.35) +HMI_DVD = HMI.Appliance(1, 8, 2, 40, 0.1, 5) +HMI_DVD.windows([1170, 1440], [0, 60], 0.35) -LMI_TV = LMI.Appliance(1,60,3,90,0.1,5) -LMI_TV.windows([450,660],[720,840],0.35,[1170,1440]) +HMI_Antenna = HMI.Appliance(1, 8, 2, 80, 0.1, 5) +HMI_Antenna.windows([1170, 1440], [0, 60], 0.35) -LMI_DVD = LMI.Appliance(1,8,3,30,0.1,5) -LMI_DVD.windows([450,660],[720,840],0.35,[1170,1440]) +HMI_Radio = HMI.Appliance(1, 36, 2, 60, 0.1, 5) +HMI_Radio.windows([390, 450], [1140, 1260], 0.35) 
-LMI_Antenna = LMI.Appliance(1,8,3,60,0.1,5)
-LMI_Antenna.windows([450,660],[720,840],0.35,[1170,1440])
+HMI_Phone_charger = HMI.Appliance(4, 2, 2, 300, 0.2, 5)
+HMI_Phone_charger.windows([1110, 1440], [0, 30], 0.35)

-LMI_Phone_charger = LMI.Appliance(4,2,1,300,0.2,5)
-LMI_Phone_charger.windows([1020,1440],[0,0],0.35)
+HMI_Freezer = HMI.Appliance(1, 200, 1, 1440, 0, 30, "yes", 3)
+HMI_Freezer.windows([0, 1440], [0, 0])
+HMI_Freezer.specific_cycle_1(200, 20, 5, 10)
+HMI_Freezer.specific_cycle_2(200, 15, 5, 15)
+HMI_Freezer.specific_cycle_3(200, 10, 5, 20)
+HMI_Freezer.cycle_behaviour(
+    [480, 1200], [0, 0], [300, 479], [0, 0], [0, 299], [1201, 1440]
+)

-LMI_Mixer = LMI.Appliance(1,50,2,30,0.1,1, occasional_use = 0.33)
-LMI_Mixer.windows([660,750],[1110,1200],0.35)
+HMI_Mixer = HMI.Appliance(1, 50, 3, 30, 0.1, 1, occasional_use=0.33)
+HMI_Mixer.windows([420, 450], [660, 750], 0.35, [1020, 1170])

-#Low Income
-LI_indoor_bulb = LI.Appliance(2,7,2,120,0.2,10)
-LI_indoor_bulb.windows([1170,1440],[0,30],0.35)
+# Lower-Middle Income
+LMI_indoor_bulb = LMI.Appliance(3, 7, 2, 120, 0.2, 10)
+LMI_indoor_bulb.windows([1170, 1440], [0, 30], 0.35)

-LI_outdoor_bulb = LI.Appliance(1,13,2,600,0.2,10)
-LI_outdoor_bulb.windows([0,330],[1170,1440],0.35)
+LMI_outdoor_bulb = LMI.Appliance(2, 13, 2, 600, 0.2, 10)
+LMI_outdoor_bulb.windows([0, 330], [1170, 1440], 0.35)

-LI_TV = LI.Appliance(1,60,3,90,0.1,5)
-LI_TV.windows([750,840],[1170,1440],0.35,[0,30])
+LMI_TV = LMI.Appliance(1, 60, 3, 90, 0.1, 5)
+LMI_TV.windows([450, 660], [720, 840], 0.35, [1170, 1440])

-LI_DVD = LI.Appliance(1,8,3,30,0.1,5)
-LI_DVD.windows([750,840],[1170,1440],0.35,[0,30])
+LMI_DVD = LMI.Appliance(1, 8, 3, 30, 0.1, 5)
+LMI_DVD.windows([450, 660], [720, 840], 0.35, [1170, 1440])

-LI_Antenna = LI.Appliance(1,8,3,60,0.1,5)
-LI_Antenna.windows([750,840],[1170,1440],0.35,[0,30])
+LMI_Antenna = LMI.Appliance(1, 8, 3, 60, 0.1, 5)
+LMI_Antenna.windows([450, 660], [720, 840], 0.35, [1170, 1440])

-LI_Phone_charger = LI.Appliance(2,2,1,300,0.2,5)
-LI_Phone_charger.windows([1080,1440],[0,0],0.35)
+LMI_Phone_charger = LMI.Appliance(4, 2, 1, 300, 0.2, 5)
+LMI_Phone_charger.windows([1020, 1440], [0, 0], 0.35)

-#Hospital
-Ho_indoor_bulb = Hospital.Appliance(12,7,2,690,0.2,10)
-Ho_indoor_bulb.windows([480,720],[870,1440],0.35)
+LMI_Mixer = LMI.Appliance(1, 50, 2, 30, 0.1, 1, occasional_use=0.33)
+LMI_Mixer.windows([660, 750], [1110, 1200], 0.35)

-Ho_outdoor_bulb = Hospital.Appliance(1,13,2,690,0.2,10)
-Ho_outdoor_bulb.windows([0,330],[1050,1440],0.35)
+# Low Income
+LI_indoor_bulb = LI.Appliance(2, 7, 2, 120, 0.2, 10)
+LI_indoor_bulb.windows([1170, 1440], [0, 30], 0.35)

-Ho_Phone_charger = Hospital.Appliance(8,2,2,300,0.2,5)
-Ho_Phone_charger.windows([480,720],[900,1440],0.35)
+LI_outdoor_bulb = LI.Appliance(1, 13, 2, 600, 0.2, 10)
+LI_outdoor_bulb.windows([0, 330], [1170, 1440], 0.35)

-Ho_Fridge = Hospital.Appliance(1,150,1,1440,0,30, 'yes',3)
-Ho_Fridge.windows([0,1440],[0,0])
-Ho_Fridge.specific_cycle_1(150,20,5,10)
-Ho_Fridge.specific_cycle_2(150,15,5,15)
-Ho_Fridge.specific_cycle_3(150,10,5,20)
-Ho_Fridge.cycle_behaviour([580,1200],[0,0],[420,579],[0,0],[0,419],[1201,1440])
+LI_TV = LI.Appliance(1, 60, 3, 90, 0.1, 5)
+LI_TV.windows([750, 840], [1170, 1440], 0.35, [0, 30])

-Ho_Fridge2 = Hospital.Appliance(1,150,1,1440,0,30, 'yes',3)
-Ho_Fridge2.windows([0,1440],[0,0])
-Ho_Fridge2.specific_cycle_1(150,20,5,10)
-Ho_Fridge2.specific_cycle_2(150,15,5,15)
-Ho_Fridge2.specific_cycle_3(150,10,5,20)
-Ho_Fridge2.cycle_behaviour([580,1200],[0,0],[420,579],[0,0],[0,299],[1201,1440]) +LI_DVD = LI.Appliance(1, 8, 3, 30, 0.1, 5) +LI_DVD.windows([750, 840], [1170, 1440], 0.35, [0, 30]) -Ho_Fridge3 = Hospital.Appliance(1,150,1,1440,0.1,30, 'yes',3) -Ho_Fridge3.windows([0,1440],[0,0]) -Ho_Fridge3.specific_cycle_1(150,20,5,10) -Ho_Fridge3.specific_cycle_2(150,15,5,15) -Ho_Fridge3.specific_cycle_3(150,10,5,20) -Ho_Fridge3.cycle_behaviour([580,1200],[0,0],[420,479],[0,0],[0,419],[1201,1440]) +LI_Antenna = LI.Appliance(1, 8, 3, 60, 0.1, 5) +LI_Antenna.windows([750, 840], [1170, 1440], 0.35, [0, 30]) + +LI_Phone_charger = LI.Appliance(2, 2, 1, 300, 0.2, 5) +LI_Phone_charger.windows([1080, 1440], [0, 0], 0.35) + +# Hospital +Ho_indoor_bulb = Hospital.Appliance(12, 7, 2, 690, 0.2, 10) +Ho_indoor_bulb.windows([480, 720], [870, 1440], 0.35) + +Ho_outdoor_bulb = Hospital.Appliance(1, 13, 2, 690, 0.2, 10) +Ho_outdoor_bulb.windows([0, 330], [1050, 1440], 0.35) + +Ho_Phone_charger = Hospital.Appliance(8, 2, 2, 300, 0.2, 5) +Ho_Phone_charger.windows([480, 720], [900, 1440], 0.35) -Ho_PC = Hospital.Appliance(2,50,2,300,0.1,10) -Ho_PC.windows([480,720],[1050,1440],0.35) +Ho_Fridge = Hospital.Appliance(1, 150, 1, 1440, 0, 30, "yes", 3) +Ho_Fridge.windows([0, 1440], [0, 0]) +Ho_Fridge.specific_cycle_1(150, 20, 5, 10) +Ho_Fridge.specific_cycle_2(150, 15, 5, 15) +Ho_Fridge.specific_cycle_3(150, 10, 5, 20) +Ho_Fridge.cycle_behaviour( + [580, 1200], [0, 0], [420, 579], [0, 0], [0, 419], [1201, 1440] +) -Ho_Mixer = Hospital.Appliance(1,50,2,60,0.1,1,occasional_use = 0.33) -Ho_Mixer.windows([480,720],[1050,1440],0.35) +Ho_Fridge2 = Hospital.Appliance(1, 150, 1, 1440, 0, 30, "yes", 3) +Ho_Fridge2.windows([0, 1440], [0, 0]) +Ho_Fridge2.specific_cycle_1(150, 20, 5, 10) +Ho_Fridge2.specific_cycle_2(150, 15, 5, 15) +Ho_Fridge2.specific_cycle_3(150, 10, 5, 20) +Ho_Fridge2.cycle_behaviour( + [580, 1200], [0, 0], [420, 579], [0, 0], [0, 299], [1201, 1440] +) -#School -S_indoor_bulb = School.Appliance(8,7,1,60,0.2,10) -S_indoor_bulb.windows([1020,1080],[0,0],0.35) +Ho_Fridge3 = Hospital.Appliance(1, 150, 1, 1440, 0.1, 30, "yes", 3) +Ho_Fridge3.windows([0, 1440], [0, 0]) +Ho_Fridge3.specific_cycle_1(150, 20, 5, 10) +Ho_Fridge3.specific_cycle_2(150, 15, 5, 15) +Ho_Fridge3.specific_cycle_3(150, 10, 5, 20) +Ho_Fridge3.cycle_behaviour( + [580, 1200], [0, 0], [420, 479], [0, 0], [0, 419], [1201, 1440] +) -S_outdoor_bulb = School.Appliance(6,13,1,60,0.2,10) -S_outdoor_bulb.windows([1020,1080],[0,0],0.35) +Ho_PC = Hospital.Appliance(2, 50, 2, 300, 0.1, 10) +Ho_PC.windows([480, 720], [1050, 1440], 0.35) -S_Phone_charger = School.Appliance(5,2,2,180,0.2,5) -S_Phone_charger.windows([510,750],[810,1080],0.35) +Ho_Mixer = Hospital.Appliance(1, 50, 2, 60, 0.1, 1, occasional_use=0.33) +Ho_Mixer.windows([480, 720], [1050, 1440], 0.35) -S_PC = School.Appliance(18,50,2,210,0.1,10) -S_PC.windows([510,750],[810,1080],0.35) +# School +S_indoor_bulb = School.Appliance(8, 7, 1, 60, 0.2, 10) +S_indoor_bulb.windows([1020, 1080], [0, 0], 0.35) -S_Printer = School.Appliance(1,20,2,30,0.1,5) -S_Printer.windows([510,750],[810,1080],0.35) +S_outdoor_bulb = School.Appliance(6, 13, 1, 60, 0.2, 10) +S_outdoor_bulb.windows([1020, 1080], [0, 0], 0.35) -S_Freezer = School.Appliance(1,200,1,1440,0,30, 'yes',3) -S_Freezer.windows([0,1440]) -S_Freezer.specific_cycle_1(200,20,5,10) -S_Freezer.specific_cycle_2(200,15,5,15) -S_Freezer.specific_cycle_3(200,10,5,20) -S_Freezer.cycle_behaviour([580,1200],[0,0],[510,579],[0,0],[0,509],[1201,1440]) +S_Phone_charger = 
School.Appliance(5, 2, 2, 180, 0.2, 5) +S_Phone_charger.windows([510, 750], [810, 1080], 0.35) -S_TV = School.Appliance(1,60,2,120,0.1,5, occasional_use = 0.5) -S_TV.windows([510,750],[810,1080],0.35) +S_PC = School.Appliance(18, 50, 2, 210, 0.1, 10) +S_PC.windows([510, 750], [810, 1080], 0.35) -S_DVD = School.Appliance(1,8,2,120,0.1,5, occasional_use = 0.5) -S_DVD.windows([510,750],[810,1080],0.35) +S_Printer = School.Appliance(1, 20, 2, 30, 0.1, 5) +S_Printer.windows([510, 750], [810, 1080], 0.35) -S_Stereo = School.Appliance(1,150,2,90,0.1,5, occasional_use = 0.33) -S_Stereo.windows([510,750],[810,1080],0.35) +S_Freezer = School.Appliance(1, 200, 1, 1440, 0, 30, "yes", 3) +S_Freezer.windows([0, 1440]) +S_Freezer.specific_cycle_1(200, 20, 5, 10) +S_Freezer.specific_cycle_2(200, 15, 5, 15) +S_Freezer.specific_cycle_3(200, 10, 5, 20) +S_Freezer.cycle_behaviour( + [580, 1200], [0, 0], [510, 579], [0, 0], [0, 509], [1201, 1440] +) +S_TV = School.Appliance(1, 60, 2, 120, 0.1, 5, occasional_use=0.5) +S_TV.windows([510, 750], [810, 1080], 0.35) +S_DVD = School.Appliance(1, 8, 2, 120, 0.1, 5, occasional_use=0.5) +S_DVD.windows([510, 750], [810, 1080], 0.35) +S_Stereo = School.Appliance(1, 150, 2, 90, 0.1, 5, occasional_use=0.33) +S_Stereo.windows([510, 750], [810, 1080], 0.35) diff --git a/ramp/example/input_file_2.py b/ramp/example/input_file_2.py index 6c3699e7..9e9d5e3a 100644 --- a/ramp/example/input_file_2.py +++ b/ramp/example/input_file_2.py @@ -1,29 +1,28 @@ # -*- coding: utf-8 -*- -#%% Definition of the inputs -''' +# %% Definition of the inputs +""" Input data definition -''' +""" from ramp.core.core import User import pandas as pd + User_list = [] -''' +""" This example input file represents a single household user whose only load is the "shower". The example showcases how to model thermal loads by means of the thermal_P_var attribute. -''' +""" -#Create new user classes -HH = User("generic households",1) +# Create new user classes +HH = User("generic households", 1) User_list.append(HH) -HH_shower_P = pd.read_csv('ramp/example/shower_P.csv') - -#High-Income -HH_shower = HH.Appliance(1,HH_shower_P,2,15,0.1,3, thermal_P_var = 0.2) -HH_shower.windows([390,540],[1080,1200],0.2) - +HH_shower_P = pd.read_csv("ramp/example/shower_P.csv") +# High-Income +HH_shower = HH.Appliance(1, HH_shower_P, 2, 15, 0.1, 3, thermal_P_var=0.2) +HH_shower.windows([390, 540], [1080, 1200], 0.2) diff --git a/ramp/example/input_file_3.py b/ramp/example/input_file_3.py index 969c4780..95898739 100644 --- a/ramp/example/input_file_3.py +++ b/ramp/example/input_file_3.py @@ -1,77 +1,91 @@ # -*- coding: utf-8 -*- -#%% Definition of the inputs -''' +# %% Definition of the inputs +""" Input data definition -''' +""" from ramp.core.core import User User_list = [] -''' +""" This example input file represents a single household user whose only loads are the "cooking" activities. The example showcases how to model electric cooking loads by means of the Prefence Index and User Preference attributes. 
-''' +""" -#Create new user classes -HH = User("generic household",1,3) +# Create new user classes +HH = User("generic household", 1, 3) User_list.append(HH) -#Create new appliances - -#Create Cooking appliances - -HH_lunch1_soup = HH.Appliance(1,1800,2,70,0.15,60, thermal_P_var = 0.2, pref_index =1, fixed_cycle=1) -HH_lunch1_soup.windows([12*60,15*60],[0,0],0.15) -HH_lunch1_soup.specific_cycle_1(1800,10,750,60,0.15) -HH_lunch1_soup.cycle_behaviour([12*60,15*60],[0,0]) - -HH_lunch2_rice = HH.Appliance(1,1800,2,25,0.15,20, thermal_P_var = 0.2, pref_index = 2, fixed_cycle=1) -HH_lunch2_rice.windows([12*60,15*60],[0,0],0.15) -HH_lunch2_rice.specific_cycle_1(1800,10,750,15,0.15) -HH_lunch2_rice.cycle_behaviour([12*60,15*60],[0,0]) - -HH_lunch2_egg = HH.Appliance(1,1200,2,3,0.2,3, thermal_P_var = 0.2 , pref_index = 2) -HH_lunch2_egg.windows([12*60,15*60],[0,0],0.15) - -HH_lunch2_platano = HH.Appliance(1,1800,2,10,0.15,5, thermal_P_var = 0.2, pref_index = 2, fixed_cycle=1) -HH_lunch2_platano.windows([12*60,15*60],[0,0],0.15) -HH_lunch2_platano.specific_cycle_1(1800,5,1200,5,0.15) -HH_lunch2_platano.cycle_behaviour([12*60,15*60],[0,0]) - -HH_lunch2_meat = HH.Appliance(1,1200,2,7,0.15,3, thermal_P_var = 0.2, pref_index = 2) -HH_lunch2_meat.windows([12*60,15*60],[0,0],0.15) - -HH_lunch3_beansnrice = HH.Appliance(1,1800,2,45,0.2,30, thermal_P_var =0.2 , pref_index = 3, fixed_cycle=1) -HH_lunch3_beansnrice.windows([12*60,15*60],[0,0],0.15) -HH_lunch3_beansnrice.specific_cycle_1(1800,10,750,35,0.2) -HH_lunch3_beansnrice.cycle_behaviour([12*60,15*60],[0,0]) - -HH_lunch3_meat = HH.Appliance(1,1200,2,10,0.2,5, thermal_P_var = 0.2, pref_index = 3) -HH_lunch3_meat.windows([12*60,15*60],[0,0],0.15) - -HH_lunch_yuca = HH.Appliance(1,1800,1,25,0.15,10, thermal_P_var = 0.2, pref_index =0, fixed_cycle=1) -HH_lunch_yuca.windows([13*60,14*60],[0,0],0.15) -HH_lunch_yuca.specific_cycle_1(1800,10,750,15,0.15) -HH_lunch_yuca.cycle_behaviour([12*60,15*60],[0,0]) - -HH_breakfast_huminta = HH.Appliance(1,1800,1,65,0.15,50, thermal_P_var = 0.2, pref_index =0, fixed_cycle=1) -HH_breakfast_huminta.windows([6*60,9*60],[0,0],0.15) -HH_breakfast_huminta.specific_cycle_1(1800,5,750,60,0.15) -HH_breakfast_huminta.cycle_behaviour([6*60,9*60],[0,0]) - -HH_breakfast_bread = HH.Appliance(1,1800,1,15,0.15,10, thermal_P_var = 0.2, pref_index =0, fixed_cycle=1) -HH_breakfast_bread.windows([6*60,9*60],[0,0],0.15) -HH_breakfast_bread.specific_cycle_1(1800,10,1200,5,0.15) -HH_breakfast_bread.cycle_behaviour([6*60,9*60],[0,0]) - -HH_breakfast_coffee = HH.Appliance(1,1800,1,5,0.15,2, thermal_P_var = 0.2, pref_index =0) -HH_breakfast_coffee.windows([6*60,9*60],[0,0],0.15) - -HH_mate = HH.Appliance(1,1800,1,30,0.3,2, thermal_P_var = 0.2, pref_index =0) -HH_mate.windows([7*60,20*60],[0,0],0.15) - - +# Create new appliances + +# Create Cooking appliances + +HH_lunch1_soup = HH.Appliance( + 1, 1800, 2, 70, 0.15, 60, thermal_P_var=0.2, pref_index=1, fixed_cycle=1 +) +HH_lunch1_soup.windows([12 * 60, 15 * 60], [0, 0], 0.15) +HH_lunch1_soup.specific_cycle_1(1800, 10, 750, 60, 0.15) +HH_lunch1_soup.cycle_behaviour([12 * 60, 15 * 60], [0, 0]) + +HH_lunch2_rice = HH.Appliance( + 1, 1800, 2, 25, 0.15, 20, thermal_P_var=0.2, pref_index=2, fixed_cycle=1 +) +HH_lunch2_rice.windows([12 * 60, 15 * 60], [0, 0], 0.15) +HH_lunch2_rice.specific_cycle_1(1800, 10, 750, 15, 0.15) +HH_lunch2_rice.cycle_behaviour([12 * 60, 15 * 60], [0, 0]) + +HH_lunch2_egg = HH.Appliance(1, 1200, 2, 3, 0.2, 3, thermal_P_var=0.2, pref_index=2) +HH_lunch2_egg.windows([12 * 60, 15 * 
60], [0, 0], 0.15) + +HH_lunch2_platano = HH.Appliance( + 1, 1800, 2, 10, 0.15, 5, thermal_P_var=0.2, pref_index=2, fixed_cycle=1 +) +HH_lunch2_platano.windows([12 * 60, 15 * 60], [0, 0], 0.15) +HH_lunch2_platano.specific_cycle_1(1800, 5, 1200, 5, 0.15) +HH_lunch2_platano.cycle_behaviour([12 * 60, 15 * 60], [0, 0]) + +HH_lunch2_meat = HH.Appliance(1, 1200, 2, 7, 0.15, 3, thermal_P_var=0.2, pref_index=2) +HH_lunch2_meat.windows([12 * 60, 15 * 60], [0, 0], 0.15) + +HH_lunch3_beansnrice = HH.Appliance( + 1, 1800, 2, 45, 0.2, 30, thermal_P_var=0.2, pref_index=3, fixed_cycle=1 +) +HH_lunch3_beansnrice.windows([12 * 60, 15 * 60], [0, 0], 0.15) +HH_lunch3_beansnrice.specific_cycle_1(1800, 10, 750, 35, 0.2) +HH_lunch3_beansnrice.cycle_behaviour([12 * 60, 15 * 60], [0, 0]) + +HH_lunch3_meat = HH.Appliance(1, 1200, 2, 10, 0.2, 5, thermal_P_var=0.2, pref_index=3) +HH_lunch3_meat.windows([12 * 60, 15 * 60], [0, 0], 0.15) + +HH_lunch_yuca = HH.Appliance( + 1, 1800, 1, 25, 0.15, 10, thermal_P_var=0.2, pref_index=0, fixed_cycle=1 +) +HH_lunch_yuca.windows([13 * 60, 14 * 60], [0, 0], 0.15) +HH_lunch_yuca.specific_cycle_1(1800, 10, 750, 15, 0.15) +HH_lunch_yuca.cycle_behaviour([12 * 60, 15 * 60], [0, 0]) + +HH_breakfast_huminta = HH.Appliance( + 1, 1800, 1, 65, 0.15, 50, thermal_P_var=0.2, pref_index=0, fixed_cycle=1 +) +HH_breakfast_huminta.windows([6 * 60, 9 * 60], [0, 0], 0.15) +HH_breakfast_huminta.specific_cycle_1(1800, 5, 750, 60, 0.15) +HH_breakfast_huminta.cycle_behaviour([6 * 60, 9 * 60], [0, 0]) + +HH_breakfast_bread = HH.Appliance( + 1, 1800, 1, 15, 0.15, 10, thermal_P_var=0.2, pref_index=0, fixed_cycle=1 +) +HH_breakfast_bread.windows([6 * 60, 9 * 60], [0, 0], 0.15) +HH_breakfast_bread.specific_cycle_1(1800, 10, 1200, 5, 0.15) +HH_breakfast_bread.cycle_behaviour([6 * 60, 9 * 60], [0, 0]) + +HH_breakfast_coffee = HH.Appliance( + 1, 1800, 1, 5, 0.15, 2, thermal_P_var=0.2, pref_index=0 +) +HH_breakfast_coffee.windows([6 * 60, 9 * 60], [0, 0], 0.15) + +HH_mate = HH.Appliance(1, 1800, 1, 30, 0.3, 2, thermal_P_var=0.2, pref_index=0) +HH_mate.windows([7 * 60, 20 * 60], [0, 0], 0.15) diff --git a/ramp/post_process/post_process.py b/ramp/post_process/post_process.py index 4b5bdc09..791f6604 100644 --- a/ramp/post_process/post_process.py +++ b/ramp/post_process/post_process.py @@ -9,64 +9,72 @@ BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Post-processing -''' +""" Just some additional code lines to calculate useful indicators and generate plots -''' +""" + + def Profile_formatting(stoch_profiles): Profile_avg = np.zeros(1440) for pr in stoch_profiles: Profile_avg = Profile_avg + pr - Profile_avg = Profile_avg/len(stoch_profiles) + Profile_avg = Profile_avg / len(stoch_profiles) Profile_kW = [] for kW in stoch_profiles: - Profile_kW.append(kW/1000) + Profile_kW.append(kW / 1000) Profile_series = np.array([]) for iii in stoch_profiles: - Profile_series = np.append(Profile_series,iii) + Profile_series = np.append(Profile_series, iii) return (Profile_avg, Profile_kW, Profile_series) -def Profile_cloud_plot(stoch_profiles,stoch_profiles_avg): - #x = np.arange(0,1440,5) - plt.figure(figsize=(10,5)) + +def Profile_cloud_plot(stoch_profiles, stoch_profiles_avg): + # x = np.arange(0,1440,5) + plt.figure(figsize=(10, 5)) for n in stoch_profiles: - plt.plot(np.arange(1440),n,'#b0c4de') - plt.xlabel('Time (hours)') - plt.ylabel('Power (W)') + plt.plot(np.arange(1440), n, "#b0c4de") + plt.xlabel("Time (hours)") + plt.ylabel("Power (W)") plt.ylim(ymin=0) - #plt.ylim(ymax=5000) + # 
plt.ylim(ymax=5000) plt.margins(x=0) plt.margins(y=0) - plt.plot(np.arange(1440),stoch_profiles_avg,'#4169e1') - plt.xticks([0,240,480,(60*12),(60*16),(60*20),(60*24)],[0,4,8,12,16,20,24]) - #plt.savefig('profiles.eps', format='eps', dpi=1000) + plt.plot(np.arange(1440), stoch_profiles_avg, "#4169e1") + plt.xticks( + [0, 240, 480, (60 * 12), (60 * 16), (60 * 20), (60 * 24)], + [0, 4, 8, 12, 16, 20, 24], + ) + # plt.savefig('profiles.eps', format='eps', dpi=1000) plt.show() def Profile_series_plot(stoch_profiles_series): - #x = np.arange(0,1440,5) - plt.figure(figsize=(10,5)) - plt.plot(np.arange(len(stoch_profiles_series)),stoch_profiles_series,'#4169e1') - #plt.xlabel('Time (hours)') - plt.ylabel('Power (W)') + # x = np.arange(0,1440,5) + plt.figure(figsize=(10, 5)) + plt.plot(np.arange(len(stoch_profiles_series)), stoch_profiles_series, "#4169e1") + # plt.xlabel('Time (hours)') + plt.ylabel("Power (W)") plt.ylim(ymin=0) - #plt.ylim(ymax=5000) + # plt.ylim(ymax=5000) plt.margins(x=0) plt.margins(y=0) - #plt.xticks([0,240,480,(60*12),(60*16),(60*20),(60*24)],[0,4,8,12,16,20,24]) - #plt.savefig('profiles.eps', format='eps', dpi=1000) + # plt.xticks([0,240,480,(60*12),(60*16),(60*20),(60*24)],[0,4,8,12,16,20,24]) + # plt.savefig('profiles.eps', format='eps', dpi=1000) plt.show() + # Export individual profiles -''' +""" for i in range (len(Profile)): np.save('p0%d.npy' % (i), Profile[i]) -''' +""" # Export Profiles + def export_series(stoch_profiles_series, j=None, fname=None, ofname=None): series_frame = pd.DataFrame(stoch_profiles_series) path_to_write = None @@ -91,44 +99,46 @@ def export_series(stoch_profiles_series, j=None, fname=None, ofname=None): print("No path to a file was provided to write the results") -valid_units = ('kW',"W","MW","GW","TW") -class Run: +valid_units = ("kW", "W", "MW", "GW", "TW") - def __init__(self,user,unit,calendar_years): +class Run: + def __init__(self, user, unit, calendar_years): self.user = user self.unit = unit - if isinstance(calendar_years,int): + if isinstance(calendar_years, int): calendar_years = [calendar_years] self.calendar_years = calendar_years datatimeIndexes = [ - pd.date_range(start=f"{year}-01-01",periods=365*60*24,freq="1min",name = "date") + pd.date_range( + start=f"{year}-01-01", periods=365 * 60 * 24, freq="1min", name="date" + ) for year in self.calendar_years ] self._datetimeIndex = datatimeIndexes[0] - for year,Index in enumerate(datatimeIndexes): + for year, Index in enumerate(datatimeIndexes): if year != 0: - self._datetimeIndex=self._datetimeIndex.append(Index) + self._datetimeIndex = self._datetimeIndex.append(Index) @property def unit(self): return self._unit @unit.setter - def unit(self,var): + def unit(self, var): if var not in valid_units: raise ValueError(f"valid units are: {valid_units}") self._unit = var - - def _get_DatetimeIndex(self,starting_day,n_days): - - request = pd.date_range(start=starting_day,periods=n_days*24*60,freq="1min") + def _get_DatetimeIndex(self, starting_day, n_days): + request = pd.date_range( + start=starting_day, periods=n_days * 24 * 60, freq="1min" + ) request_intersection = self._datetimeIndex.intersection(request) @@ -137,27 +147,23 @@ def _get_DatetimeIndex(self,starting_day,n_days): return request - def generate_profiles(self,starting_day,n_days,peak_time_range,columns=["Baseline"]): - idx = self._get_DatetimeIndex(starting_day,n_days) + def generate_profiles( + self, starting_day, n_days, peak_time_range, columns=["Baseline"] + ): + idx = self._get_DatetimeIndex(starting_day, n_days) 
        results = {}
         for profile in columns:
             profiles = []
-            for prof_i in idx.day_of_year.unique():
-
+            for prof_i in idx.day_of_year.unique():
                 result = self.user.generate_aggregated_load_profile(
-                    prof_i = prof_i,
-                    day_type = 1,
-                    peak_time_range = peak_time_range
+                    prof_i=prof_i, day_type=1, peak_time_range=peak_time_range
                 )

                 profiles.extend(result.tolist())

-            results[profile] = pd.Series(index=idx,data=profiles)
-
-
-        return Plot(pd.concat(results,axis=1))
-
+            results[profile] = pd.Series(index=idx, data=profiles)

+        return Plot(pd.concat(results, axis=1))


 class Plot:
@@ -178,8 +184,9 @@ class Plot:
     columns : Index or list-like
         Column labels to use for resulting frame when representing the simulation cases
     """
+
     @classmethod
-    def from_file(self,file,sheet_name=0,sep=",",index=None):
+    def from_file(self, file, sheet_name=0, sep=",", index=None):
         """initializing a Plot object from a file results

         Parameters
@@ -200,20 +207,21 @@ def from_file(self,file,sheet_name=0,sep=",",index=None):
         """

         if file.endswith(".csv"):
-            df = pd.read_csv(file,sheet_name=sheet_name,index_col=0,header=0,sep=sep)
+            df = pd.read_csv(file, index_col=0, header=0, sep=sep)

         elif file.endswith(".xlsx"):
-
-            df = pd.read_excel(file,sheet_name=sheet_name,index_col=0,header=0)
+            df = pd.read_excel(file, sheet_name=sheet_name, index_col=0, header=0)

         else:
-            raise ValueError("unkwnon format specified for the file. Only .csv or .xlsx formats are allowed.")
-
-        return Plot(df,index)
-
+            raise ValueError(
+                "unknown format specified for the file. Only .csv or .xlsx formats are allowed."
+            )

+        return Plot(df, index)

-    def __init__(self,df,index=None):
+    def __init__(self, df, index=None):
         """initializes a Plot object using a pd.DataFrame

         Parameters
@@ -230,11 +238,9 @@ def __init__(self,df,index=None):

         self.DataFrame = df

-
     @property
     def freq(self):
-        """return the frequency of the pd.DatetimeIndex
-        """
+        """return the frequency of the pd.DatetimeIndex"""
         return self.df.index.freq

     @property
@@ -249,14 +255,14 @@ def columns(self):
         return self.df.columns.tolist()

     @columns.setter
-    def columns(self,var):
+    def columns(self, var):
         self.df.columns = var

     @property
     def index(self):
         return self.df.index

-    def resample(self,freq,rule,conversion=1,inplace=False):
+    def resample(self, freq, rule, conversion=1, inplace=False):
         """returns a resampled version of the data

         Parameters
@@ -277,8 +283,8 @@ def resample(self,freq,rule,conversion=1,inplace=False):
         """
         df = self.df.copy()

-        df = df/conversion
-        df = getattr(df.resample(freq),rule)()
+        df = df / conversion
+        df = getattr(df.resample(freq), rule)()

         if inplace:
             self.df = df
@@ -286,7 +292,7 @@ def resample(self,freq,rule,conversion=1,inplace=False):
         else:
             return Plot(df)

-    def line(self,columns=None,engine="matplotlib",**kwargs):
+    def line(self, columns=None, engine="matplotlib", **kwargs):
         """creating a line plot

         Parameters
@@ -311,20 +317,24 @@ def line(self,columns=None,engine="matplotlib",**kwargs):
         df = self.df[columns]

         if engine == "plotly":
-
             fig = px.line(df)
             fig.update_layout(**kwargs)

             return fig

         elif engine == "matplotlib":
-
-            ax = df.plot(kind="line",**kwargs)
+            ax = df.plot(kind="line", **kwargs)

             return ax

-
-    def shadow(self,main_column=None,columns="all",average=True,engine="matplotlib",**kwargs):
+    def shadow(
+        self,
+        main_column=None,
+        columns="all",
+        average=True,
+        engine="matplotlib",
+        **kwargs,
+    ):
         """creating a shadow plot

         Parameters
@@ -347,13 +357,15 @@ def 
shadow(self,main_column=None,columns="all",average=True,engine="matplotlib", engine = self._check_engine(engine) if main_column is None and average == False: - raise ValueError("one of columns should be passed as the main_column when the average = False") + raise ValueError( + "one of columns should be passed as the main_column when the average = False" + ) elif main_column is not None and average == True: raise ValueError("main_column cannot be given when average = True") elif main_column is not None and average == False: - df_main = (self.df.copy()[main_column]).to_frame(main_column) + df_main = (self.df.copy()[main_column]).to_frame(main_column) else: df_main = (self.mean()).df["Mean"].to_frame("Average") @@ -363,54 +375,52 @@ def shadow(self,main_column=None,columns="all",average=True,engine="matplotlib", df_other = self.df.copy()[columns] - if isinstance(df_other,pd.Series): + if isinstance(df_other, pd.Series): df_other = df_other.to_frame(columns) if engine == "matplotlib": fig = plt.figure(**kwargs) ax = fig.add_subplot(1, 1, 1) - ax.plot(df_main.index, df_main.values,color="black",label=df_main.columns[0]) + ax.plot( + df_main.index, df_main.values, color="black", label=df_main.columns[0] + ) - ax.plot(df_other.index,df_other.values,alpha=0.3,color="black") + ax.plot(df_other.index, df_other.values, alpha=0.3, color="black") ax.legend() - return fig,ax + return fig, ax elif engine == "plotly": fig = go.Figure() fig.add_trace( go.Scatter( - x = df_main.index, - y = df_main.values.ravel(), - mode = "lines", - name = df_main.columns[0], - line = dict(color = "rgb(0,0,0)") + x=df_main.index, + y=df_main.values.ravel(), + mode="lines", + name=df_main.columns[0], + line=dict(color="rgb(0,0,0)"), ) ) - for col,vals in df_other.items(): - + for col, vals in df_other.items(): fig.add_trace( go.Scatter( - x = vals.index, - y = vals.values.ravel(), - mode = "lines", + x=vals.index, + y=vals.values.ravel(), + mode="lines", showlegend=False, - line = dict(color = "rgba(0,0,0,0.15)") + line=dict(color="rgba(0,0,0,0.15)"), ) ) - - fig.update_layout(**kwargs) return fig - - def area(self,columns=None,engine="matplotlib",**kwargs): + def area(self, columns=None, engine="matplotlib", **kwargs): """an area plot Parameters @@ -434,17 +444,15 @@ def area(self,columns=None,engine="matplotlib",**kwargs): df = self.df[columns] if engine == "matplotlib": - return df.plot(kind = "area",**kwargs) + return df.plot(kind="area", **kwargs) elif engine == "plotly": - - fig = px.area(df) + fig = px.area(df) fig.update_layout(**kwargs) return fig - - def load_duration_curve(self,column,engine="matplotlib",**kwargs): + def load_duration_curve(self, column, engine="matplotlib", **kwargs): """plots the load duration curve Parameters @@ -464,24 +472,21 @@ def load_duration_curve(self,column,engine="matplotlib",**kwargs): df = self.df[[column]] - - df = df.sort_values(by=column,ascending=False) - df.index = [i for i in range(1,len(df.index)+1)] + df = df.sort_values(by=column, ascending=False) + df.index = [i for i in range(1, len(df.index) + 1)] if engine == "plotly": + fig = px.line(df) + fig.update_layout(**kwargs) - fig = px.line(df) - fig.update_layout(**kwargs) - - return fig + return fig elif engine == "matplotlib": - - ax = df.plot(kind="line",**kwargs) + ax = df.plot(kind="line", **kwargs) return ax - def error(self,base_column,validated_data): + def error(self, base_column, validated_data): """returns the error Parameters @@ -497,12 +502,15 @@ def error(self,base_column,validated_data): error in each time 
slice
         """

-        er = (self.df[base_column] - self.df[validated_data])/self.df[validated_data].values * 100
+        er = (
+            (self.df[base_column] - self.df[validated_data])
+            / self.df[validated_data].values
+            * 100
+        )

         er = er.fillna(0)

         return er

-
     @property
     def peak(self):
         """a dict with all peak hours for each column of the pd.DataFrame
@@ -515,34 +523,31 @@ def peak(self):

         output = {}

-        for col,vals in self.df.items():
+        for col, vals in self.df.items():
             max = vals.loc[vals == vals.max()]
             output[col] = max

         return output

-
     def __repr__(self):
         return str(self)

     def __str__(self) -> str:
-        return (self.DataFrame.head(10).to_string() + "\n ......")
+        return self.DataFrame.head(10).to_string() + "\n ......"

     @property
     def DataFrame(self):
-        """returns the data of the Plot object
-        """
+        """returns the data of the Plot object"""
         return self.df

     @DataFrame.setter
-    def DataFrame(self,var):
-
-        self._validate_df(var,check_index=False)
+    def DataFrame(self, var):
+        self._validate_df(var, check_index=False)
         self.df = var

-    def add_column(self,var):
+    def add_column(self, var):
         """adds new column to the data

         Parameters
@@ -550,22 +555,20 @@ def add_column(self,var):
         var : pd.DataFrame
             a pd.DataFrame with similar index to the main dataset
         """
-        if isinstance(var,Plot):
+        if isinstance(var, Plot):
             var = var.DataFrame

         self._validate_df(var)

         self.df[var.columns] = var.values

-
-    def __getitem__(self,key):
-
-        if isinstance(key,str):
+    def __getitem__(self, key):
+        if isinstance(key, str):
             key = [key]

         return Plot(self.DataFrame[key])

-    def loc(self,index=slice(None),columns=slice(None)):
+    def loc(self, index=slice(None), columns=slice(None)):
         """loc method to filter the data

         Parameters
@@ -581,9 +584,9 @@ def loc(self,index=slice(None),columns=slice(None)):
            a Plot object using the index and columns filters
         """

-        return Plot(self.DataFrame.loc[index,columns])
+        return Plot(self.DataFrame.loc[index, columns])

-    def iloc(self,index=slice(None),columns=slice(None)):
+    def iloc(self, index=slice(None), columns=slice(None)):
         """iloc method to filter the data based on position index

         Parameters
@@ -599,9 +602,9 @@ def iloc(self,index=slice(None),columns=slice(None)):
            a Plot object using the index and columns filters
         """

-        return Plot(self.DataFrame.iloc[index,columns])
+        return Plot(self.DataFrame.iloc[index, columns])

-    def head(self,var):
+    def head(self, var):
         """returns the top var numbers of data

         Parameters
@@ -616,7 +619,7 @@ def head(self,var):
         """
         return Plot(self.DataFrame.head(var))

-    def plot(self,**kwargs):
+    def plot(self, **kwargs):
         """returns a pd.DataFrame.plot object

         Returns
@@ -625,18 +628,19 @@ def plot(self,**kwargs):
         """
         return self.DataFrame.plot(**kwargs)

-    def _validate_df(self,other,check_index=True):
-        if not isinstance(other,pd.DataFrame):
+    def _validate_df(self, other, check_index=True):
+        if not isinstance(other, pd.DataFrame):
             raise ValueError("only pd.DataFrame object is allowed")

-        if not(isinstance,other.index,pd.DatetimeIndex):
+        if not isinstance(other.index, pd.DatetimeIndex):
             raise ValueError("a valid dataframe should have a pd.DatetimeIndex")

         if check_index:
             if not self.index.equals(other.index):
-                raise ValueError("the new column should have identical index with the existing DataFrame")
-
+                raise ValueError(
+                    "the new column should have identical index with the existing DataFrame"
+                )

-    def to_excel(self,path):
+    def to_excel(self, path):
         """saves the data into excel

         Parameters
@@ -648,7 +652,7 @@ def to_excel(self,path):

         with pd.ExcelWriter(path) as file:
self.DataFrame.to_excel(file) - def to_csv(self,path,sep=','): + def to_csv(self, path, sep=","): """saves the data into csv Parameters @@ -658,7 +662,7 @@ def to_csv(self,path,sep=','): sep : str,optional csv separator """ - self.DataFrame.to_csv(path,sep=sep) + self.DataFrame.to_csv(path, sep=sep) def mean(self): """returns the mean of the columns @@ -681,19 +685,16 @@ def sum(self): return Plot(self.DataFrame.sum(1).to_frame("Sum")) def copy(self): - """returns a copy of the existing object - """ + """returns a copy of the existing object""" return Plot(self.df.copy()) - def _check_engine(self,engine): - + def _check_engine(self, engine): if engine.lower() == "matplotlib": - return "matplotlib" elif engine.lower() == "plotly": - return "plotly" - raise ValueError(f"{engine} is not a valid plot engine. Only 'Plotly' and 'matplotlib are valid.'") - + raise ValueError( + f"{engine} is not a valid plot engine. Only 'Plotly' and 'matplotlib are valid.'" + ) diff --git a/ramp/ramp_convert_old_input_files.py b/ramp/ramp_convert_old_input_files.py index d5d0948e..f29ab6fe 100644 --- a/ramp/ramp_convert_old_input_files.py +++ b/ramp/ramp_convert_old_input_files.py @@ -1,4 +1,4 @@ -#%% Import required modules +# %% Import required modules import sys, os, importlib @@ -28,7 +28,7 @@ dest="suffix", type=str, help="suffix appended to the converted filename", - default="" + default="", ) @@ -60,7 +60,9 @@ def convert_old_user_input_file( if "from core import" in l: line_to_change = i # Change import statement by explicitly naming the full package path - lines[line_to_change] = lines[line_to_change].replace("from core import", "from ramp.core.core import") + lines[line_to_change] = lines[line_to_change].replace( + "from core import", "from ramp.core.core import" + ) # Modify import statement in file if line_to_change != -1: @@ -89,7 +91,6 @@ def convert_old_user_input_file( if __name__ == "__main__": - args = vars(parser.parse_args()) fname = args["fname_path"] output_path = args.get("output_path") diff --git a/ramp/ramp_run.py b/ramp/ramp_run.py index 96a8773a..2337eb80 100644 --- a/ramp/ramp_run.py +++ b/ramp/ramp_run.py @@ -21,13 +21,13 @@ under the License. 
""" -#%% Import required modules +# %% Import required modules -import sys,os +import sys, os import numpy as np -sys.path.append('../') +sys.path.append("../") try: from .core.utils import get_day_type, yearly_pattern @@ -39,38 +39,75 @@ from post_process import post_process as pp -def run_usecase(j=None, fname=None, ofname=None, num_profiles=None, days=None, plot=True, parallel=False): - +def run_usecase( + j=None, + fname=None, + ofname=None, + num_profiles=None, + days=None, + plot=True, + parallel=False, +): # Calls the stochastic process and saves the result in a list of stochastic profiles if days is None: - Profiles_list = stochastic_process(j=j, fname=fname, num_profiles=num_profiles, day_type=yearly_pattern(), parallel=parallel) + Profiles_list = stochastic_process( + j=j, + fname=fname, + num_profiles=num_profiles, + day_type=yearly_pattern(), + parallel=parallel, + ) # Post-processes the results and generates plots - Profiles_avg, Profiles_list_kW, Profiles_series = pp.Profile_formatting(Profiles_list) - pp.Profile_series_plot(Profiles_series) # by default, profiles are plotted as a series + Profiles_avg, Profiles_list_kW, Profiles_series = pp.Profile_formatting( + Profiles_list + ) + pp.Profile_series_plot( + Profiles_series + ) # by default, profiles are plotted as a series pp.export_series(Profiles_series, j, fname, ofname) - if len(Profiles_list) > 1: # if more than one daily profile is generated, also cloud plots are shown + if ( + len(Profiles_list) > 1 + ): # if more than one daily profile is generated, also cloud plots are shown pp.Profile_cloud_plot(Profiles_list, Profiles_avg) else: Profiles_list = [] if parallel is True: - Profiles_list = stochastic_process(j=j, fname=fname, num_profiles=len(days), day_type=[get_day_type(day)for day in days], parallel=parallel) + Profiles_list = stochastic_process( + j=j, + fname=fname, + num_profiles=len(days), + day_type=[get_day_type(day) for day in days], + parallel=parallel, + ) else: for day in days: print("Day", day) - daily_profiles = stochastic_process(j=j, fname=fname, num_profiles=num_profiles, day_type=get_day_type(day), parallel=parallel) + daily_profiles = stochastic_process( + j=j, + fname=fname, + num_profiles=num_profiles, + day_type=get_day_type(day), + parallel=parallel, + ) Profiles_list.append(np.mean(daily_profiles, axis=0)) if plot is True: # Post-processes the results and generates plots - Profiles_avg, Profiles_list_kW, Profiles_series = pp.Profile_formatting(Profiles_list) - pp.Profile_series_plot(Profiles_series) # by default, profiles are plotted as a series + Profiles_avg, Profiles_list_kW, Profiles_series = pp.Profile_formatting( + Profiles_list + ) + pp.Profile_series_plot( + Profiles_series + ) # by default, profiles are plotted as a series pp.export_series(Profiles_series, j, fname, ofname) - if len(Profiles_list) > 1: # if more than one daily profile is generated, also cloud plots are shown + if ( + len(Profiles_list) > 1 + ): # if more than one daily profile is generated, also cloud plots are shown pp.Profile_cloud_plot(Profiles_list, Profiles_avg) else: return Profiles_list @@ -80,11 +117,11 @@ def run_usecase(j=None, fname=None, ofname=None, num_profiles=None, days=None, p if __name__ == "__main__": - for j in input_files_to_run: try: - run_usecase(j=j, fname='../example/input_file_{}.xlsx'.format(j)) + run_usecase(j=j, fname="../example/input_file_{}.xlsx".format(j)) except: - print('Input files in .xlsx format not found. 
Running the default files in .py format.') + print( + "Input files in .xlsx format not found. Running the default files in .py format." + ) run_usecase(j=j) - \ No newline at end of file diff --git a/ramp/test/test_run.py b/ramp/test/test_run.py index 699f5f02..9717fbb1 100644 --- a/ramp/test/test_run.py +++ b/ramp/test/test_run.py @@ -5,65 +5,84 @@ as expected or not is left to the developers """ -#%% Import required modules +# %% Import required modules import pandas as pd import matplotlib.pyplot as plt import os -#%% Function to test the output against reference results -''' + +# %% Function to test the output against reference results +""" By default, reference results are provided for a series of 30 days and for all th 3 reference input files. If this is changed, the function below must be called for the correct number of input files and days through the related parameters -''' +""" + -def test_output(results_folder,test_folder, num_input_files=3, num_days=30): - +def test_output(results_folder, test_folder, num_input_files=3, num_days=30): def series_to_average(profile_series, num_days): - average_series = profile_series[0:1440] - for d in range(1,num_days): - average_series = average_series + profile_series[0+1440*d:1440+1440*d].set_index(average_series.index) - average_series = average_series/ (d+1) - + for d in range(1, num_days): + average_series = average_series + profile_series[ + 0 + 1440 * d : 1440 + 1440 * d + ].set_index(average_series.index) + average_series = average_series / (d + 1) + return average_series - + default_out = {} current_out = {} axes = {} - - fig = plt.figure(figsize=(14,14)) - - for n in range(1,4): - - if n < num_input_files+1: - default_out[n] = series_to_average(pd.read_csv('{}/output_file_{}.csv'.format(test_folder,n), index_col=0), num_days) - current_out[n] = series_to_average(pd.read_csv('{}/output_file_{}.csv'.format(results_folder,n), index_col=0), num_days) - - axes[n] = fig.add_subplot((310+n)) - - axes[n].plot(default_out[n], color='red', linewidth=2, label='default') - axes[n].plot(current_out[n], color='blue',linewidth=1, alpha=0.8, linestyle='--', label='new results') + + fig = plt.figure(figsize=(14, 14)) + + for n in range(1, 4): + if n < num_input_files + 1: + default_out[n] = series_to_average( + pd.read_csv( + "{}/output_file_{}.csv".format(test_folder, n), index_col=0 + ), + num_days, + ) + current_out[n] = series_to_average( + pd.read_csv( + "{}/output_file_{}.csv".format(results_folder, n), index_col=0 + ), + num_days, + ) + + axes[n] = fig.add_subplot((310 + n)) + + axes[n].plot(default_out[n], color="red", linewidth=2, label="default") + axes[n].plot( + current_out[n], + color="blue", + linewidth=1, + alpha=0.8, + linestyle="--", + label="new results", + ) axes[n].set_xmargin(0) axes[n].set_ymargin(0) - axes[n].set_ylabel('Power demand (kW)') + axes[n].set_ylabel("Power demand (kW)") else: - axes[n] = fig.add_subplot((310+n)) + axes[n] = fig.add_subplot((310 + n)) axes[n].set_xmargin(0) axes[n].set_ymargin(0) - axes[n].get_shared_x_axes().join(axes[n], axes[n-1], axes[n-2]) - axes[n-1].legend() - axes[n-1].set_xticklabels([]) - axes[n-2].set_xticklabels([]) - -#%% Testing the output and providing visual result -''' + axes[n].get_shared_x_axes().join(axes[n], axes[n - 1], axes[n - 2]) + axes[n - 1].legend() + axes[n - 1].set_xticklabels([]) + axes[n - 2].set_xticklabels([]) + + +# %% Testing the output and providing visual result +""" Here the visual comparison between default and new/current results occurs. 
Besides the difference naturally occurring due to different realisations of stochastic parameters, the developers should check whether any other differences are brought by by the tested code changes. If any differences are there, the developers should evaluate whether these are as expected/designed or not -''' +""" -test_output('../results','../test', num_input_files=3) \ No newline at end of file +test_output("../results", "../test", num_input_files=3) diff --git a/setup.py b/setup.py index d4a6d502..0fb008d4 100644 --- a/setup.py +++ b/setup.py @@ -1,28 +1,27 @@ - -from setuptools import find_packages,setup +from setuptools import find_packages, setup exec(open("ramp/_version.py").read()) setup( - name = "rampdemand", - description= "An open-source python package for building bottom-up stochastic model for generating multi-energy load profiles", - long_description = open("README.rst",encoding="utf8").read(), - author_email = "f.lombardi@tudelft.nl", - url= "https://github.com/RAMP-project/RAMP", - version = __version__, - packages= find_packages(), + name="rampdemand", + description="An open-source python package for building bottom-up stochastic model for generating multi-energy load profiles", + long_description=open("README.rst", encoding="utf8").read(), + author_email="f.lombardi@tudelft.nl", + url="https://github.com/RAMP-project/RAMP", + version=__version__, + packages=find_packages(), license="European Union Public License 1.2", - python_requires = ">=3.6.0", - package_data={"": ["*.txt", "*.dat", "*.doc", "*.rst","*.xlsx","*.csv"]}, - install_requires = [ + python_requires=">=3.6.0", + package_data={"": ["*.txt", "*.dat", "*.doc", "*.rst", "*.xlsx", "*.csv"]}, + install_requires=[ "pandas >= 1.3.3", "numpy >= 1.21.2", "xlsxwriter >= 1.3.7", "matplotlib >= 3.3.4", "openpyxl >= 3.0.6", "tqdm", - "plotly" + "plotly", ], # classifiers=[ # "Programming Language :: Python :: 3.7", @@ -36,11 +35,9 @@ # "Programming Language :: Python", # "Topic :: Scientific/Engineering", # ], - entry_points={ - "console_scripts": [ - "ramp=ramp.cli:main", - ], -}, - + "console_scripts": [ + "ramp=ramp.cli:main", + ], + }, ) diff --git a/tests/test_calc_peak_time_range.py b/tests/test_calc_peak_time_range.py index 61612aef..6c90a07e 100644 --- a/tests/test_calc_peak_time_range.py +++ b/tests/test_calc_peak_time_range.py @@ -10,7 +10,7 @@ import scipy.stats as stats from ramp.core.initialise import user_defined_inputs from ramp.core.stochastic_process import calc_peak_time_range - + def test_peak_time_range_values(): """ @@ -25,22 +25,22 @@ def test_peak_time_range_values(): """ user_list = user_defined_inputs(j=1, fname=None) - + num_repetitions = 100 # Set the desired number of repetitions results = [] - + for _ in range(num_repetitions): peak_time_range = calc_peak_time_range(user_list, peak_enlarge=0.15) results.append(peak_time_range) - + statistics_dict = {} - + # Performing statistical analysis on the results for i, arr in enumerate(results): - statistics_dict[i] = {'mean': np.mean(arr)} + statistics_dict[i] = {"mean": np.mean(arr)} # Extract the mean values - mean_sample = [inner_dict['mean'] for inner_dict in statistics_dict.values()] + mean_sample = [inner_dict["mean"] for inner_dict in statistics_dict.values()] # Perform the normality test (Shapiro-Wilk in this sample) _, p_value = stats.shapiro(mean_sample) assert p_value > 0.05 diff --git a/tests/test_input_file_conversion.py b/tests/test_input_file_conversion.py index b327d9c7..07c1f9ab 100644 --- a/tests/test_input_file_conversion.py +++ 
b/tests/test_input_file_conversion.py @@ -9,9 +9,7 @@ def load_usecase(j=None, fname=None): - peak_enlarge, user_list, num_profiles = initialise_inputs( - j, fname, num_profiles=1 - ) + peak_enlarge, user_list, num_profiles = initialise_inputs(j, fname, num_profiles=1) return user_list @@ -20,11 +18,11 @@ def setup_method(self): self.input_files_to_run = [1, 2, 3] self.file_suffix = "_test" self.py_fnames = [ - os.path.join("ramp","example", f"input_file_{i}.py") + os.path.join("ramp", "example", f"input_file_{i}.py") for i in self.input_files_to_run ] self.xlsx_fnames = [ - os.path.join("ramp","test", f"input_file_{i}{self.file_suffix}.xlsx") + os.path.join("ramp", "test", f"input_file_{i}{self.file_suffix}.xlsx") for i in self.input_files_to_run ] for fname in self.xlsx_fnames: @@ -53,7 +51,9 @@ def test_convert_py_to_xlsx(self): for i, j in enumerate(self.input_files_to_run): old_user_list = load_usecase(j=j) convert_old_user_input_file( - self.py_fnames[i], output_path=os.path.join("ramp","test"), suffix=self.file_suffix + self.py_fnames[i], + output_path=os.path.join("ramp", "test"), + suffix=self.file_suffix, ) new_user_list = load_usecase(fname=self.xlsx_fnames[i]) for old_user, new_user in zip(old_user_list, new_user_list): @@ -133,11 +133,18 @@ def test_A(): appliance1 = user.Appliance(user, **old_params) appliance1.windows(window_1=[win_start, win_stop]) - params = dict(number=1, power=200, num_windows=1, func_time=0, window_1=np.array([win_start, win_stop])) + params = dict( + number=1, + power=200, + num_windows=1, + func_time=0, + window_1=np.array([win_start, win_stop]), + ) appliance2 = user.add_appliance(**params) assert appliance1 == appliance2 + def test_B(): user = User("test user", 1) @@ -157,4 +164,4 @@ def test_B(): params.update(cycle_params) appliance2 = user.add_appliance(**params) - assert appliance1 == appliance2 \ No newline at end of file + assert appliance1 == appliance2 diff --git a/tests/test_rand_total_time_of_use.py b/tests/test_rand_total_time_of_use.py index e4e3c99f..6258d29a 100644 --- a/tests/test_rand_total_time_of_use.py +++ b/tests/test_rand_total_time_of_use.py @@ -15,47 +15,50 @@ @pytest.fixture def appliance_instance(): - # Create a User instance (you may need to provide the required arguments for User) user = User(user_name="Test User", num_users=1) appliance = Appliance( - user="Test User", - name="Test Appliance", - func_time=100, # Set an appropriate func_time - func_cycle=20, - time_fraction_random_variability=0.1 - ) + user="Test User", + name="Test Appliance", + func_time=100, # Set an appropriate func_time + func_cycle=20, + time_fraction_random_variability=0.1, + ) return appliance - + # Define the test class for the Appliance class class TestAppliance: - - #Test that the method returns an integer - @pytest.mark.usefixtures("appliance_instance") + # Test that the method returns an integer + @pytest.mark.usefixtures("appliance_instance") def test_returns_integer_value(self, appliance_instance): - result = appliance_instance.rand_total_time_of_use([0, 480], [600, 1080], [1200, 1440]) + result = appliance_instance.rand_total_time_of_use( + [0, 480], [600, 1080], [1200, 1440] + ) assert isinstance(result, int) - - #Tests that the method returns a value greater than func_cycle - @pytest.mark.usefixtures("appliance_instance") + + # Tests that the method returns a value greater than func_cycle + @pytest.mark.usefixtures("appliance_instance") def test_rand_time_equal_or_greater_than_func_cycle(self, appliance_instance): # Define windows with 
total available time rand_window_1 = [0, 100] rand_window_2 = [200, 300] rand_window_3 = [400, 500] appliance_instance.func_cycle = 50 - # Generate a sample of 'rand_time' values + # Generate a sample of 'rand_time' values sample_size = 100 rand_time_sample = [] for _ in range(sample_size): - rand_time = appliance_instance.rand_total_time_of_use(rand_window_1, rand_window_2, rand_window_3) + rand_time = appliance_instance.rand_total_time_of_use( + rand_window_1, rand_window_2, rand_window_3 + ) rand_time_sample.append(rand_time) - assert all(rand_time >= appliance_instance.func_cycle for rand_time in rand_time_sample) - - + assert all( + rand_time >= appliance_instance.func_cycle for rand_time in rand_time_sample + ) + # Tests that the method returns a value less than or equal to 0.99 * total_time - @pytest.mark.usefixtures("appliance_instance") + @pytest.mark.usefixtures("appliance_instance") def test_rand_time_less_than_99_percent_total_time(self, appliance_instance): rand_window_1 = [0, 100] rand_window_2 = [200, 300] @@ -63,7 +66,12 @@ def test_rand_time_less_than_99_percent_total_time(self, appliance_instance): appliance_instance.func_time = 200 appliance_instance.time_fraction_random_variability = 0.5 # Call the method from the class - rand_time = appliance_instance.rand_total_time_of_use(rand_window_1, rand_window_2, rand_window_3) - total_time = (rand_window_1[1] - rand_window_1[0]) + (rand_window_2[1] - rand_window_2[0]) + (rand_window_3[1] - rand_window_3[0]) + rand_time = appliance_instance.rand_total_time_of_use( + rand_window_1, rand_window_2, rand_window_3 + ) + total_time = ( + (rand_window_1[1] - rand_window_1[0]) + + (rand_window_2[1] - rand_window_2[0]) + + (rand_window_3[1] - rand_window_3[0]) + ) assert rand_time <= 0.99 * total_time - diff --git a/tests/test_switch_on.py b/tests/test_switch_on.py index 456bd35a..6a4160f8 100644 --- a/tests/test_switch_on.py +++ b/tests/test_switch_on.py @@ -11,34 +11,32 @@ from scipy import stats - class TestRandSwitchOnWindow: - # Test when self.fixed = yes, all the apps are switched on together (should return the total number of apps) def test_all_appliances_switched_on_together(self): - appliance = Appliance(user=None, number=5, fixed='yes') + appliance = Appliance(user=None, number=5, fixed="yes") coincidence = appliance.calc_coincident_switch_on() assert isinstance(coincidence, int) assert coincidence == appliance.number - + # Test when self.fixed= no and the index value lies in peak_time_range (test whether the coincidence values are normally distributed or not) def test_coincidence_normality_on_peak(self): # Create an instance of the Appliance class with the desired parameters - appliance = Appliance(user=None, number=10, fixed='no') - - # Generate a sample of 'coincidence' values + appliance = Appliance(user=None, number=10, fixed="no") + + # Generate a sample of 'coincidence' values sample_size = 30 coincidence_sample = [] for _ in range(sample_size): coincidence = appliance.calc_coincident_switch_on(inside_peak_window=True) coincidence_sample.append(coincidence) - + # Perform the Shapiro-Wilk test for normality _, p_value = stats.shapiro(coincidence_sample) - - # Assert that the p-value is greater than a chosen significance level + + # Assert that the p-value is greater than a chosen significance level assert p_value > 0.05, "The 'coincidence' values are not normally distributed." 
- + # Tests that the method returns a list of indexes within the available functioning windows when there are multiple available functioning windows and the random time is larger than the duration of the appliance's function cycle. def test_happy_path(self): appliance = Appliance(user=None, func_cycle=2) @@ -79,6 +77,7 @@ def test_edge_case_multiple_free_spots(self): appliance = Appliance(user=None, func_cycle=2) appliance.free_spots = [slice(0, 5), slice(10, 15), slice(20, 25)] indexes = appliance.rand_switch_on_window(rand_time=6) - assert all(index in range(0, 5) or index in range(10, 15) or index in range(20, 25) for index in indexes) - - \ No newline at end of file + assert all( + index in range(0, 5) or index in range(10, 15) or index in range(20, 25) + for index in indexes + )