Skip to content

Commit

Permalink
Changes of working dir
Browse files Browse the repository at this point in the history
  • Loading branch information
albertoamaduzzi committed Aug 2, 2024
1 parent c12c09c commit 30559cd
Show file tree
Hide file tree
Showing 19 changed files with 643 additions and 339 deletions.
38 changes: 9 additions & 29 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,14 @@ Input:
./bin/city-pro ./work_geo/bologna_mdt_detailed/date/config_bologna.json
```

## Complete Analysis
```
python3 ./python/LaunchParallelCpp.py
```
The script will automatically set the configuration files for each day by calling `SetRightDirectoriesConfiguration.py` and
then launch `main_city-pro.cpp` in parallel for each day.
Once the C++ analysis finishes, it will also run the Python analysis on the resulting data.
# Output:


Expand Down Expand Up @@ -243,33 +251,5 @@ For own cartography the parameter needs to be changed.
To be specified ...

#### Python
To be specified ...
# LAUNCH ANALYSIS (WORK IN PROGRESS)

``` ./python/config_subnet_create.py ```
(README in the file)
Output:
all_subnets.sh
work_geo/bologna_mdt_detailed/date/plot_subnet

AnalysisPaper.ipynb (non è il top affatto)
Bisogna inserire manualmente gli indirizzi dove è salvata la roba nella prima cella. Fatto questo si possono runnare le altre celle.
Poi lanciare cella per cella:
Input:
fcm.csv
stats.csv
timed_fluxes.csv
Output:
distribuzione velocità per ogni classe
distribuzione lunghezze e tempi per ogni classe
fondamental diagram per ogni classe


#### POSTPROCESSING AGGREGATION PYTHON
COMMAND:
python3 fondamental_diagram_aggregated.py -c config_fundamental_diagram_aggregated.json
Input:
class_i_velocity_subnet.csv
_fcm.csv

```python3 ./python/work_mdt/script/AnalysisMdt/AnalysisPaper.py```

2 changes: 1 addition & 1 deletion bologna-provincia.geojson

Large diffs are not rendered by default.

20 changes: 19 additions & 1 deletion python/LaunchParallelCpp.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,26 @@ def run_command(cmd):
subprocess.run(cmd, shell=True, check=True)
except subprocess.CalledProcessError as e:
print(f"Error executing command: {cmd}\n{e}")
def run_adjust_configuration_file(cmd):
    """Run the configuration-adjustment command through the shell.

    A command that exits with non-zero status is reported on stdout
    instead of raising, so the surrounding pipeline keeps running.
    """
    try:
        subprocess.run(cmd, shell=True, check=True)
    except subprocess.CalledProcessError as e:
        print(f"Error executing command: {cmd}\n{e}")

def run_python_analysis(cmd):
    """Launch the Python post-processing step via a shell command.

    Errors (non-zero exit codes) are printed rather than propagated.
    """
    try:
        subprocess.run(cmd, shell=True, check=True)
    except subprocess.CalledProcessError as e:
        print(f"Error executing command: {cmd}\n{e}")

if __name__ == "__main__":
    # Step 1: normalize every per-day config file so the C++ runs below read
    # the right directories/flags.
    # NOTE(review): invoked without an interpreter prefix — relies on the
    # script having a shebang and the executable bit set; confirm, or prefix
    # with "python3 " like cmd_python_analysis below.
    command_set_right_values_config = "./python/SetRightDirectoriesConfiguration.py"
    run_adjust_configuration_file(command_set_right_values_config)

    # Step 2: one worker per command so every day's C++ analysis runs in
    # parallel.
    # BUG FIX: pool.map(run_command, commands) was duplicated, which re-ran
    # every job a second time; map only once.
    with Pool(len(commands)) as pool:
        pool.map(run_command, commands)

    # Step 3: once the C++ output exists, run the Python analysis on it.
    cmd_python_analysis = "python3 ./python/work_mdt/script/AnalysisMdt/AnalysisPaper.py"
    run_python_analysis(cmd_python_analysis)
4 changes: 4 additions & 0 deletions python/SetRightDirectoriesConfiguration.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,10 @@
config["enable_subnet"] = True
config["jump2subnet_analysis"] = True
config["max_poly_length"] = 6000
# NOTE: Status -> Try one more class than the usual analysis to see if the
# Fcm Clustering separates walkers from bikes.
config["num_tm"] = config["num_tm"] + 1
config["num_tm_subnet"] = config["num_tm_subnet"] + 1

with open(os.path.join(WORKSPACE,"city-pro","bologna_mdt_detailed",StrDate,"config_bologna.json"),"w") as f:
    # BUG FIX: json.dump returns None, so the previous
    # `config = json.dump(config, f, indent=2)` clobbered `config`,
    # breaking any later use of it. Write without rebinding.
    json.dump(config, f, indent=2)

Expand Down
136 changes: 123 additions & 13 deletions python/work_mdt/script/AnalysisMdt/AnalysisNetwork1Day.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -470,22 +470,116 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"5725.53576641327"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
"name": "stdout",
"output_type": "stream",
"text": [
"DejaVu Math TeX Gyre\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans\n",
"DejaVu Sans Display\n",
"DejaVu Sans Mono\n",
"DejaVu Sans Mono\n",
"DejaVu Sans Mono\n",
"DejaVu Sans Mono\n",
"DejaVu Sans Mono\n",
"DejaVu Sans Mono\n",
"DejaVu Sans Mono\n",
"DejaVu Sans Mono\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif\n",
"DejaVu Serif Display\n",
"Inconsolata\n",
"Inconsolata\n",
"STIXGeneral\n",
"STIXGeneral\n",
"STIXGeneral\n",
"STIXGeneral\n",
"STIXNonUnicode\n",
"STIXNonUnicode\n",
"STIXNonUnicode\n",
"STIXNonUnicode\n",
"STIXSizeFiveSym\n",
"STIXSizeFourSym\n",
"STIXSizeFourSym\n",
"STIXSizeOneSym\n",
"STIXSizeOneSym\n",
"STIXSizeThreeSym\n",
"STIXSizeThreeSym\n",
"STIXSizeTwoSym\n",
"STIXSizeTwoSym\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Source Code Pro\n",
"Ubuntu\n",
"Ubuntu\n",
"Ubuntu\n",
"Ubuntu\n",
"Ubuntu\n",
"Ubuntu\n",
"Ubuntu\n",
"Ubuntu\n",
"Ubuntu\n",
"Ubuntu Condensed\n",
"Ubuntu Mono\n",
"Ubuntu Mono\n",
"Ubuntu Mono\n",
"Ubuntu Mono\n",
"cmb10\n",
"cmex10\n",
"cmmi10\n",
"cmr10\n",
"cmss10\n",
"cmsy10\n",
"cmtt10\n"
]
}
],
"source": [
"max(gdf[\"poly_length\"])"
"import matplotlib.font_manager as fm\n",
"\n",
"# List all available fonts\n",
"available_fonts = sorted([f.name for f in fm.fontManager.ttflist])\n",
"\n",
"# Print the available fonts\n",
"for font in available_fonts:\n",
" print(font)"
]
},
{
Expand Down Expand Up @@ -529,10 +623,26 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": []
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[25, 67, 28] 0.1517484756935079 0.319497325302865 0.8399702407907828\n"
]
}
],
"source": [
"from numpy import random\n",
"L = 3\n",
"comb = [random.randint(20,100) for i in range(L)]\n",
"c0 = random.random()*2\n",
"c1 = random.random()*2\n",
"d = random.random()*2\n",
"print(comb,c0,c1,d)\n"
]
}
],
"metadata": {
Expand Down
111 changes: 33 additions & 78 deletions python/work_mdt/script/AnalysisMdt/AnalysisNetwork1Day.py
Original file line number Diff line number Diff line change
Expand Up @@ -1151,6 +1151,13 @@ def PlotTimePercorrenceDistributionAllClasses(self):


def PlotTimePercorrenceConditionalLengthRoad(self):
"""
Description:
1) Draws the distribution of road lengths
2) Draws the time percorrence distribution conditioned to the length of the road.
"""
self.PlotDistributionLengthRoadPerClass()
self.CountFunctionsCalled += 1

# Drop rows with NaN values in the 'poly_length' column
Expand Down Expand Up @@ -1193,7 +1200,33 @@ def PlotTimePercorrenceConditionalLengthRoad(self):
Message += "\tComputed Length2Class2Time2Distr, Length2Class2AvgTimePercorrence"
AddMessageToLog(Message,self.LogFile)

def PlotDistributionLengthRoadPerClass(self):
    """
    Plot the distribution of road lengths for every class on one figure.

    Side effects:
        - Fills self.IntClass2RoadLengthDistr with a 100-bin histogram
          ({"n": counts, "bins": edges}) per class.
        - Saves DistributionRoadLengthPerClass_<date>.png in self.PlotDir.

    NOTE: the point is to spot a possible length bias between classes,
    i.e. distributions peaked around different centroids.
    """
    plt.subplots(1, 1, figsize=(10, 10))
    self.IntClass2RoadLengthDistr = {}
    for IntClass, StrClass in self.IntClass2StrClass.items():
        Lengths = self.Class2DfSpeedAndTimePercorrenceRoads[IntClass]["poly_length"]
        counts, edges = np.histogram(Lengths, bins=100)
        self.IntClass2RoadLengthDistr[IntClass] = {"n": counts, "bins": edges}
        sns.histplot(Lengths, bins=100, label=StrClass, kde=True)
    plt.legend()
    plt.xlabel("Length [m]")
    plt.ylabel("Counts")
    plt.title("Distribution of Roads Length for Each Class")
    plt.savefig(os.path.join(self.PlotDir, "DistributionRoadLengthPerClass_{0}.png".format(self.StrDate)))
    plt.close()

def GetTime2ClassPeople(self):
    """
    Build self.Class2Time2NPeople (people per quarter-hour, per class)
    from the per-class road dataframes, then plot it into self.PlotDir.
    """
    partition = GetPartitionClass2Time2Npeople(self.Class2DfSpeedAndTimePercorrenceRoads)
    self.Class2Time2NPeople = partition
    PlotTime2NumberPeopleClasses(partition, self.IntClass2StrClass, self.PlotDir)
##--------------- Dictionaries --------------##
def CreateDictionaryIntClass2StrClass(self):
'''
Expand Down Expand Up @@ -1554,81 +1587,3 @@ def PrintBool(self):
print("Incremental subnet: ",self.ReadFluxesSubIncreasinglyIncludedIntersectionBool)


def GetDistributionPerClass(fcm,Feature,class_):
    """
    Compute the 50-bin histogram of one feature restricted to one class.

    Input:
        fcm: polars DataFrame holding at least the columns "class" and Feature.
        Feature: str -> time, lenght, av_speed, p, a_max
        class_: value of the "class" column to filter on.
    Returns:
        n, bins of the feature's distribution for that class.
    """
    n, bins = np.histogram(fcm.filter(pl.col("class") == class_)[Feature].to_list(), bins = 50)
    # BUG FIX: the histogram was computed but never returned, so every
    # caller received None despite the documented contract.
    return n, bins



def PlotSubnetHTML(ListDailyNetwork,Daily = True):
    """
    Render each daily network's per-class sub-networks as toggleable folium layers.

    Args:
        ListDailyNetwork: iterable of DailyNetwork objects exposing
            IntClass2Roads (class -> list of road indices) and GeoJson
            (GeoDataFrame with an 'index' column and road geometries).
        Daily: unused; kept for backward compatibility with existing callers.

    NOTE(review): every iteration saves to the same "map_with_layers.html",
    so only the LAST network's map survives — confirm whether a per-day
    filename was intended.
    """
    for DailyNetwork in ListDailyNetwork:
        class2indices = DailyNetwork.IntClass2Roads
        # Create a base map
        m = folium.Map()

        # One layer per class, so layers can be toggled independently
        for class_, index_list in class2indices.items():
            # Filter GeoDataFrame for roads with indices in the current list
            filtered_gdf = DailyNetwork.GeoJson[DailyNetwork.GeoJson['index'].isin(index_list)]

            # Create a feature group for the current layer
            layer_group = folium.FeatureGroup(name=f"Layer {class_}").add_to(m)

            # Add roads to the feature group with a unique color
            for _, road in filtered_gdf.iterrows():
                color = 'blue'  # Choose a color for the road based on index or any other criterion
                # BUG FIX: bind `color` as a default argument. The original
                # lambda captured `color` by reference (late binding); harmless
                # while it is constant, but wrong the moment per-road colors
                # are introduced.
                folium.GeoJson(road.geometry,
                               style_function=lambda x, color=color: {'color': color}).add_to(layer_group)

            # (Removed redundant second layer_group.add_to(m): the group was
            # already attached to the map at creation above.)

        # Add layer control to the map
        folium.LayerControl().add_to(m)

        # Save or display the map
        m.save("map_with_layers.html")




"""resolution = 100
n_bins_std = 100
bin_width = 5
rescaling_factor_pdf = resolution/n_bins_std
i=0
for fcm_data in fcm:
plot_distribution_velocity_all_class_together_per_day(fcm_data,list_dict_name,i)
plot_aggregated_velocity(fcm_data,list_dict_name,i)
for cl,df in fcm_data.groupby('class'):
if cl!=10 and len(list_dict_name[i][cl])!=0:
n,bins = np.histogram(df['av_speed'].to_numpy(),bins = n_bins_std,range = [0,n_bins_std-bin_width])
scaling_factor_data = np.sum(n)
initial_guess_sigma = np.std(df['av_speed'].to_numpy())
initial_guess_mu = np.mean(df['av_speed'].to_numpy())
params, pcov = curve_fit(maxwellian,xdata = bins[:-1],ydata = np.array(n)/scaling_factor_data, p0=[initial_guess_sigma, initial_guess_mu])
a_maxwell,b_maxwell = params
print("covariance matrix a,b:\n",pcov)
print("a_maxwell,b_maxwell:\n",a_maxwell,b_maxwell)
# a_maxwell,b_maxwell = maxwell.fit(df['av_speed'].to_numpy(),floc = np.mean(df['av_speed']))
a_gauss,b_gauss = norm.fit(df['av_speed'].to_numpy(),floc = np.mean(df['av_speed']))
fig,ax = plt.subplots(1,1,figsize= (15,12))
plt.hist(df['av_speed'].to_numpy(),bins = n_bins_std,range = [0,n_bins_std-bin_width],density = True)
av_speed = np.mean(df['av_speed'].to_numpy())
ax.set_xlabel('average speed (m/s)')
ax.set_ylabel('Count')
ax.set_title(list_dict_name[i][cl] + ' vel: ' + str(round(av_speed,3)) +' m/s')
# print('maxwell pdf:\n ',maxwellian(np.linspace(min(bins),max(bins),resolution),a_maxwell,b_maxwell))
# print('gaussian pdf rescaled:\n ',norm.pdf(np.linspace(min(bins),max(bins),resolution),a_gauss,b_gauss))
plt.plot(np.linspace(min(bins),max(bins),resolution),maxwellian(np.linspace(min(bins),max(bins),resolution),a_maxwell,b_maxwell),label = 'maxwell',color = 'violet')
plt.plot(np.linspace(min(bins),max(bins),resolution),norm.pdf(np.linspace(min(bins),max(bins),resolution),a_gauss,b_gauss),label = 'gauss',color = 'red')
plt.legend(['maxwell','gauss'])
plt.savefig(os.path.join(s_dir[i],'average_speed_{}.png'.format(list_dict_name[i][cl])),dpi = 200)
plt.show()
i+=1
"""
Loading

0 comments on commit 30559cd

Please sign in to comment.