Ma5 Restframes implementation #281

Draft: wants to merge 29 commits into main from the ma5_restframes branch

Changes from all 29 commits
6dcf311
Integrating RestFrames, Eigen and Neldemear (as done in HackAnalysis)
BFuks May 30, 2024
1068790
Adding a RestFrames accessor from the physics service class
BFuks Jun 5, 2024
90e01cd
fixing a boost
BFuks Jun 19, 2024
aa4d24d
updating release date
BFuks Jun 19, 2024
22ca87f
Adding HEPdata efficiency readers
BFuks Jul 19, 2024
f304716
updating version dates, and changelog
BFuks Jul 19, 2024
0332134
Fixing a few bugs/warnings related to compilation on linux
BFuks Aug 1, 2024
26bd986
updating date tag
BFuks Aug 1, 2024
1cbc8e1
typo in the Rodrigues rotation formula
BFuks Aug 9, 2024
605e76b
small bug fix in run_recast.py
BFuks Aug 30, 2024
c9d1de8
small bug fix, again + versioning
BFuks Aug 30, 2024
4bf2bed
Merge branch 'main' into ma5_restframes
jackaraz Dec 20, 2024
2d73c2b
integrate spey
jackaraz Jan 10, 2025
f79e535
require spey
jackaraz Jan 10, 2025
006a4c3
update req
jackaraz Jan 10, 2025
3c30789
update updatechecker
jackaraz Jan 10, 2025
212439d
add spey detection
jackaraz Jan 10, 2025
51f3e93
update spey veto
jackaraz Jan 10, 2025
bd17f8e
update package checker
jackaraz Jan 10, 2025
c5b32d1
update checker
jackaraz Jan 10, 2025
a32c288
simplify
jackaraz Jan 10, 2025
e860563
fix expectation vals
jackaraz Jan 10, 2025
ef10548
simplify
jackaraz Jan 10, 2025
36fb7d6
update
jackaraz Jan 10, 2025
23ed41f
simplify the construction
jackaraz Jan 11, 2025
e2b6fd7
installation of data files with the PAD, for efficiencies
BFuks Jan 30, 2025
8eedcbe
Merge branch 'ma5_restframes' of https://github.com/MadAnalysis/madan…
BFuks Jan 30, 2025
353a97f
pushing changes related to the new dataverse link format
BFuks Jan 31, 2025
30cc7ea
some bug fixes with the spey/pyhf compatibility
BFuks Jan 31, 2025
8 changes: 4 additions & 4 deletions bin/ma5
@@ -33,7 +33,7 @@ This is the main executable, a simple frontend to set up the PYTHONPATH
 and call immediately the command line interface scripts
 """
 
-import importlib
+from importlib import util
 import os
 import sys
 
@@ -48,7 +48,7 @@ if sys.version_info[0] != 3 or sys.version_info[1] <= 6:
     )
 
 # Checking that the 'six' package is present
-if not importlib.util.find_spec("six"):
+if not util.find_spec("six"):
     sys.exit(
         'The python "six" module is not found on your system and it is required for MadAnalysis 5 for '
         + "a question of Python 2/3 compatibility. Please install it with the following command:\n"
@@ -74,8 +74,8 @@ sys.path.insert(0, servicedir)
 
 # Release version
 # Do not touch it !!!!!
-version = "1.10.16"
-date = "2024/08/26"
+version = "1.10.17"
+date = "2024/08/30"
 
 # Loading the MadAnalysis session
 import madanalysis.core.launcher
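
Note on the importlib change above: writing "from importlib import util" guarantees that the importlib.util submodule is actually loaded before find_spec is called, which is not always the case when only the top-level importlib package has been imported. A minimal sketch of the same optional-dependency check, with a generic package name and message standing in for the actual MadAnalysis 5 wording:

import sys
from importlib import util

def require_package(name):
    """Abort with a readable message if a required Python package is missing."""
    # find_spec() returns None when the package cannot be located,
    # without importing it as a side effect.
    if util.find_spec(name) is None:
        sys.exit("The python '" + name + "' module is not found; please install it, e.g. with: pip install " + name)

require_package("six")
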
3 changes: 3 additions & 0 deletions doc/releases/changelog-v1.10.md
@@ -68,6 +68,9 @@
 
 * Include c-tagging options.
 
+* Integrating RestFrames as well as readers for CSV-encoded efficiencies provided on HEPData.
+
+
 ## Bug fixes
 
 * Permanently fix the zlib version to the latest.
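
The new changelog entry above advertises readers for CSV-encoded efficiencies. The exact table layout expected by the PADForSFS readers is not part of this diff; the snippet below is only an illustrative sketch, assuming a hypothetical comma-separated table of (observable value, efficiency) rows with '#' comment lines, which is one common way HEPData exports efficiency tables:

import csv

def read_efficiency_table(path):
    """Illustrative only: collect (x, efficiency) pairs from a CSV file,
    skipping blank lines and '#' comments. The real PAD format may differ."""
    table = []
    with open(path, newline="") as handle:
        for row in csv.reader(handle):
            if not row or row[0].lstrip().startswith("#"):
                continue
            table.append((float(row[0]), float(row[1])))
    return table
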
860 changes: 533 additions & 327 deletions madanalysis/core/main.py

Large diffs are not rendered by default.

71 changes: 65 additions & 6 deletions madanalysis/install/install_pad.py
@@ -55,13 +55,15 @@ def __init__(self,main, padname):
         self.files = {
             "padsfs.dat" : "https://madanalysis.irmp.ucl.ac.be/raw-attachment/wiki/SFS/padsfs3.dat",
             "bib_padsfs.dat" : "http://madanalysis.irmp.ucl.ac.be/raw-attachment/wiki/MA5SandBox/bib_pad3.dat",
-            "json_padsfs.dat": "http://madanalysis.irmp.ucl.ac.be/raw-attachment/wiki/SFS/json_padsfs3.dat"
+            "json_padsfs.dat": "http://madanalysis.irmp.ucl.ac.be/raw-attachment/wiki/SFS/json_padsfs3.dat",
+            "csv_padsfs.dat" : "http://madanalysis.irmp.ucl.ac.be/raw-attachment/wiki/SFS/csv_padsfs.dat"
         }
         self.analyses = []
         self.analysis_files = []
         self.pileup_files = []
         self.delphes_cards = []
         self.json_cards = []
+        self.csv_cards = []
 
 
     def Detect(self):
@@ -137,6 +139,12 @@ def CreatePackageFolder(self):
             if not ok:
                 return False
 
+        # CSV files
+        if "csv_padsfs.dat" in self.files.keys():
+            logging.debug('Creating folder '+self.installdir+'/Input/CSV')
+            TheCommand = ['mkdir', self.installdir+'/Input/CSV']
+            ok= ShellCommand.Execute(TheCommand,self.main.archi_info.ma5dir+'/tools')
+
         # EXIT
         return True
 
@@ -167,6 +175,24 @@ def Download(self):
                     [ line.strip().split('|')[1].split(), line.strip().split('|')[2].split()];
                 json_input.close();
 
+        # CSV files
+        csv_dictionary = {}
+        if self.padname in ['PADForSFS']:
+            csv_struct_name = [x for x in self.files.keys() if 'csv' in x]
+            if len(csv_struct_name) == 1:
+                logging.getLogger('MA5').debug(" ** Getting the list of data CSV files in " + self.downloaddir+"/"+csv_struct_name[0])
+                csv_input = open(os.path.join(self.downloaddir,csv_struct_name[0]))
+                for line in csv_input:
+                    if len(line.strip())==0 or line.strip().startswith('#'): continue
+                    analysis_name = line.strip().split('|')[0].strip()
+                    server_name = line.strip().split('|')[1].strip()
+                    csv_name = line.strip().split('|')[2].strip()
+                    tag = line.strip().split('|')[3].strip()
+                    if not analysis_name in csv_dictionary.keys(): csv_dictionary[analysis_name] = {}
+                    if not server_name in csv_dictionary[analysis_name].keys(): csv_dictionary[analysis_name][server_name] = {}
+                    csv_dictionary[analysis_name][server_name][csv_name] = tag
+                csv_input.close();
+
         # Getting the analysis one by one (and creating first skeleton analyses for each of them)
         logging.getLogger('MA5').debug('Reading the analysis list in ' + \
             os.path.join(self.downloaddir,self.padname.replace('For','').lower()+'.dat'))
@@ -188,7 +214,11 @@
             # getting the dataverse URLs
             delphes_url='';
             if 'dataverse' in delphes:
-                delphes_url = 'https://dataverse.uclouvain.be/api/access/datafile/'+delphes.split()[-1][:-1];
+                if len(delphes.split())==3:
+                    delphes_url = 'https://dataverse.uclouvain.be/api/access/datafile/'+delphes.split()[-1][:-1];
+                if len(delphes.split())==4:
+                    delphes_url = 'https://dataverse.uclouvain.be/api/access/datafile/:persistentId?persistentId=doi:10.14428/DVN/' + \
+                        delphes.split()[2].strip() + '/' + delphes.split()[-1][:-1].strip();
             if len(analysis)==0 and len(url)==0:
                 delphes = delphes.split()[1]
             elif self.padname!='PADForSFS':
@@ -234,15 +264,40 @@
             if 'dataverse' in url:
                 exts = ['cpp', 'h', 'info'];
                 anl_files = url.split(']')[0].split()[1:];
+                doi=''
+                if len(anl_files)==4:
+                    doi=anl_files[0]
+                    anl_files=anl_files[1:]
                 for i in range(len(anl_files)):
-                    files[analysis+'.'+exts[i]] = 'https://dataverse.uclouvain.be/api/access/datafile/'+anl_files[i];
+                    if doi=='':
+                        files[analysis+'.'+exts[i]] = 'https://dataverse.uclouvain.be/api/access/datafile/'+anl_files[i];
+                    else:
+                        files[analysis+'.'+exts[i]] = 'https://dataverse.uclouvain.be/api/access/datafile/:persistentId?persistentId=doi:10.14428/DVN/'+\
+                            doi + '/' + anl_files[i];
                 ## json files
                 if analysis in list(json_dictionary.keys()):
                     for i_json in range(len(json_dictionary[analysis][0])):
-                        files[analysis+'_'+json_dictionary[analysis][0][i_json]+'.json'] =\
-                            'https://dataverse.uclouvain.be/api/access/datafile/' + json_dictionary[analysis][1][i_json]
+                        if doi=='':
+                            files[analysis+'_'+json_dictionary[analysis][0][i_json]+'.json'] =\
+                                'https://dataverse.uclouvain.be/api/access/datafile/' + json_dictionary[analysis][1][i_json]
+                        else:
+                            files[analysis+'_'+json_dictionary[analysis][0][i_json]+'.json'] =\
+                                'https://dataverse.uclouvain.be/api/access/datafile/:persistentId?persistentId=doi:10.14428/DVN/' + doi + '/' + json_dictionary[analysis][1][i_json]
                         self.json_cards.append(analysis+'_'+json_dictionary[analysis][0][i_json]+'.json')
                         self.analysis_files.append(analysis+'_'+json_dictionary[analysis][0][i_json]+'.json')
+                ## CSV files
+                if analysis in list(csv_dictionary.keys()):
+                    TheCommand = ['mkdir', self.installdir+'/Input/CSV/'+ analysis.upper()]
+                    ok= ShellCommand.Execute(TheCommand,self.main.archi_info.ma5dir+'/tools')
+                    for k, v in csv_dictionary[analysis].items():
+                        if k != 'dataverse': continue
+                        for file, tag in v.items():
+                            if doi=='':
+                                files[file+'.csv'] = 'https://dataverse.uclouvain.be/api/access/datafile/' + tag
+                            else:
+                                files[file+'.csv'] = 'https://dataverse.uclouvain.be/api/access/datafile/:persistentId?persistentId=doi:10.14428/DVN/' +\
+                                    doi + '/' + tag
+                            self.csv_cards.append([analysis, file+'.csv'])
             else:
                 if url=='MA5-local':
                     url='http://madanalysis.irmp.ucl.ac.be/raw-attachment/wiki/'
@@ -313,10 +368,14 @@ def Unpack(self):
                 oldfile = os.path.join(self.downloaddir, analysis + '.'+extension)
                 shutil.copy(oldfile,newfile)
 
-        # json files fopr pyhf
+        # json files for pyhf
         for json in self.json_cards:
             shutil.copy(os.path.join(self.downloaddir,json), self.PADdir)
 
+        # data files
+        for csv in self.csv_cards:
+            shutil.copy(os.path.join(self.downloaddir,csv[1]), self.installdir+'/Input/CSV/'+ csv[0].upper())
+
         # the delphes cards
         for myfile in self.delphes_cards:
             shutil.copy(os.path.join(self.downloaddir,myfile), self.delphesdir)
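
Taken together, the Download() changes above support two forms of UCLouvain dataverse links: the original numeric datafile identifiers and the new DOI-based persistentId format (commit 353a97f), selected by how many whitespace-separated fields an entry carries. The csv_padsfs.dat index itself is parsed as pipe-separated fields, analysis | server | csv file | tag. A condensed sketch of the URL construction, with a hypothetical helper name and made-up identifiers; the real entries live in the downloaded *.dat index files:

def dataverse_url(datafile, doi=""):
    """Build a UCLouvain dataverse download URL, mirroring the branching
    on the optional DOI suffix in InstallPad.Download() above."""
    base = "https://dataverse.uclouvain.be/api/access/datafile/"
    if doi == "":
        # plain numeric datafile identifier
        return base + datafile
    # DOI-based persistentId form
    return base + ":persistentId?persistentId=doi:10.14428/DVN/" + doi + "/" + datafile

# made-up identifiers, for illustration only
print(dataverse_url("123456"))
print(dataverse_url("analysis.info", doi="XYZ1AB"))
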