
Commit

Merge branch 'develop'
WolfgangWaltenberger committed Jan 22, 2025
2 parents 92b7b12 + 0d452cc commit 7a079b0
Showing 16 changed files with 893 additions and 123 deletions.
7 changes: 7 additions & 0 deletions ReleaseNotes
@@ -1,3 +1,10 @@
Release v3.0.2, Thu 23 Jan 2025
=======================================================

* Small fix in pyhf caching code (results should not change)
* Fixes with parallelisation of resummino cross section computer
* Tiny fix to allow '#' without subsequent whitespace as comment in text database

Release v3.0.1, Thu 31 Oct 2024
=======================================================

7 changes: 7 additions & 0 deletions docs/manual/source/ReleaseUpdate.rst
@@ -33,6 +33,13 @@ What's New
==========
The major novelties of all releases since v1.0 are as follows:

New in Version 3.0.2:
^^^^^^^^^^^^^^^^^^^^^

* Small fix in pyhf caching code (results should not change)
* Fixes with parallelisation of :ref:`resummino cross section <xsecResummino>` computer
* Tiny fix to allow '#' without subsequent whitespace as comment in text database

New in Version 3.0.1:
^^^^^^^^^^^^^^^^^^^^^

16 changes: 12 additions & 4 deletions smodels/base/runtime.py
@@ -10,10 +10,13 @@
"""

from typing import Union

## place to keep the pointer to the model file (default = mssm)
modelFile="smodels.share.models.mssm"

_experimental = False ## turn on experimental features
_experimental = { "truncatedgaussians": False,
"spey": False } ## experimental features

_deltas_rel_default = .2 ## the default relative error on the signal strength

@@ -51,11 +54,16 @@ def filetype ( filename ):
return None


def experimentalFeatures():
""" a simple boolean flag to turn experimental features on/off,
def experimentalFeature( feature : str ) -> Union[None,bool]:
""" method to check if a certain experimental feature is enabled.
can be turned on and off via the experimentalFeatures section in parameters.ini.
:param feature: name of the feature to query
:returns: None if feature does not exist, else boolean
"""
return _experimental
if feature not in _experimental:
return None
return _experimental[feature]

def nCPUs():
""" obtain the number of *available* CPU cores on the machine, for several
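
As a quick orientation for the new per-feature switch, here is a minimal usage sketch of experimentalFeature() (the feature names are the keys of the _experimental dictionary above; an installed SModelS is assumed):

from smodels.base import runtime

# turn one feature on, as setExperimentalFeatures() in modelTester.py would
runtime._experimental["spey"] = True

print(runtime.experimentalFeature("spey"))                # True
print(runtime.experimentalFeature("truncatedgaussians"))  # False (default)
print(runtime.experimentalFeature("nosuchfeature"))       # None: unknown feature
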
53 changes: 28 additions & 25 deletions smodels/experiment/databaseObj.py
@@ -58,7 +58,7 @@ def _getSHA1(filename):

# some mechanism to remove lock files if the download got interrupted
import atexit
lockfiles = set()
lockfiles = set()

def removeLockFiles( lockfiles ):
""" remove cruft lockfiles """
@@ -73,13 +73,13 @@ def removeLockFiles( lockfiles ):
atexit.register ( removeLockFiles, lockfiles )

class Database(object):
"""
"""
Database object. Holds a list of SubDatabases and the ExpSMS map.
Delegates all calls to SubDatabases.
"""

def __init__(self, base=None, force_load=None,
progressbar=False, subpickle=True,
progressbar=False, subpickle=True,
combinationsmatrix=None):
"""
:param base: path to the database, or pickle file (string), or http
@@ -99,17 +99,15 @@ def __init__(self, base=None, force_load=None,
optionally specifying signal regions, e.g. { "anaid1:SR1":
( "anaid2:SR2", "anaid3" ) }
"""

self.subs = []

if "_fastlim" in base: # for backwards compatibility
base = base.replace("_fastlim", "+fastlim")
sstrings = base.split("+")
for ss in sstrings:
self.subs.append(SubDatabase(ss, force_load,
progressbar, subpickle,
progressbar, subpickle,
combinationsmatrix))


# Compute SMS dict with all results
self._allExpSMSDict = ExpSMSDict(self.expResultList)
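
As a usage sketch of the path handling above (the "official" specifier is only illustrative; any '+'-separated combination of database paths is split the same way):

from smodels.experiment.databaseObj import Database

# the legacy "_fastlim" suffix is rewritten to "+fastlim", then the base string
# is split on '+' and one SubDatabase is built per component
db = Database("official_fastlim")   # equivalent to Database("official+fastlim")
print(len(db.subs))                 # 2: one SubDatabase per component
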
@@ -120,7 +118,7 @@
@property
def expResultList(self):
"""
The combined list of results, compiled from the
The combined list of results, compiled from the
the active results in each subdatabase.
"""

@@ -235,11 +233,11 @@ def getExpResults(self, analysisIDs=['all'], datasetIDs=['all'], txnames=['all']
Select (filter) the results within the database satisfying the restrictions set by the arguments and returns the corresponding results.
"""

self.selectExpResults(analysisIDs=analysisIDs, datasetIDs=datasetIDs,
txnames=txnames, dataTypes=dataTypes,
self.selectExpResults(analysisIDs=analysisIDs, datasetIDs=datasetIDs,
txnames=txnames, dataTypes=dataTypes,
useNonValidated=useNonValidated,
onlyWithExpected=onlyWithExpected)

return self.expResultList[:]

def selectExpResults(self, analysisIDs=['all'], datasetIDs=['all'], txnames=['all'],
@@ -420,8 +418,8 @@ def __init__(self, base=None, force_load=None,
self._setParticles()
self.txt_meta.printFastlimBanner()
return
logger.error("when initialising database: force_load=%s is not "
"recognized. Valid values are: pcl, txt, None." % force_load)
logger.error( f"when initialising database: force_load={force_load} is not "
"recognized. Valid values are: pcl, txt, None." )
raise SModelSError()

def __eq__(self, other):
@@ -462,16 +460,21 @@ def loadDatabase(self):
it needs update, create new binary file, in
case it does need an update.
"""
if not os.path.exists(self.pcl_meta.pathname):
logger.info("Creating binary database ")
logger.info("(this may take a few minutes, but it's done only once!)")
self.loadTextDatabase()
self.createBinaryFile()
else:
if self.needsUpdate():
try:
if not os.path.exists(self.pcl_meta.pathname):
logger.info("Creating binary database ")
logger.info("(this may take a few minutes, but it's done only once!)")
self.loadTextDatabase()
self.createBinaryFile()
else:
self.loadBinaryFile(lastm_only=False)
if self.needsUpdate():
self.createBinaryFile()
else:
self.loadBinaryFile(lastm_only=False)
except Exception as e:
import traceback
logger.error ( "when loading database: {e}, {traceback.format_exc()}" )
sys.exit(-1)

def loadTextDatabase(self):
""" simply loads the textdabase """
@@ -557,7 +560,7 @@ def loadBinaryFile(self, lastm_only=False):
t1 = time.time()-t0
logger.info("Loaded database from %s in %.1f secs." %
(self.pcl_meta.pathname, t1))
self.databaseParticles = None
self.databaseParticles = None
try:
self.databaseParticles = serializer.load(f)
except EOFError as e:
@@ -669,7 +672,7 @@ def base(self):
def lockFile ( self, filename : os.PathLike ):
""" lock the file <filename>
"""
lockfile = os.path.join ( os.path.dirname ( filename ),
lockfile = os.path.join ( os.path.dirname ( filename ),
".lock_"+ os.path.basename ( filename ) )
ctr = 0
while ( ctr < 5 ):
@@ -700,7 +703,7 @@ def lockFile ( self, filename : os.PathLike ):
def unlockFile ( self, filename : os.PathLike ):
""" unlock the file <filename>
"""
lockfile = os.path.join ( os.path.dirname ( filename ),
lockfile = os.path.join ( os.path.dirname ( filename ),
".lock_"+ os.path.basename ( filename ) )
if lockfile in lockfiles:
lockfiles.remove( lockfile )
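
For context, a condensed sketch of the lock-file pattern that lockFile/unlockFile and the atexit hook above implement (simplified: the real lockFile also retries when a lock already exists, and what exactly is written into the lock file is an assumption here):

import atexit, os

lockfiles = set()

def removeLockFiles(lockfiles):
    """ remove cruft lockfiles at interpreter exit """
    for lf in list(lockfiles):
        if os.path.exists(lf):
            os.remove(lf)
    lockfiles.clear()

atexit.register(removeLockFiles, lockfiles)

def lockFile(filename):
    # the lock is a sibling file called ".lock_<basename>"
    lockfile = os.path.join(os.path.dirname(filename),
                            ".lock_" + os.path.basename(filename))
    with open(lockfile, "w") as f:
        f.write(str(os.getpid()))
    lockfiles.add(lockfile)   # remembered, so the atexit hook can clean it up

def unlockFile(filename):
    lockfile = os.path.join(os.path.dirname(filename),
                            ".lock_" + os.path.basename(filename))
    if lockfile in lockfiles:
        lockfiles.remove(lockfile)
    if os.path.exists(lockfile):
        os.remove(lockfile)
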
@@ -1079,7 +1082,7 @@ def setActiveExpResults(self, analysisIDs=['all'], datasetIDs=['all'], txnames=[
"""

self._activeResults = self.getExpResults(analysisIDs, datasetIDs, txnames,
dataTypes, useNonValidated,
dataTypes, useNonValidated,
onlyWithExpected)

def getExpResults(self, analysisIDs=['all'], datasetIDs=['all'], txnames=['all'],
2 changes: 1 addition & 1 deletion smodels/experiment/infoObj.py
@@ -45,7 +45,7 @@ def __init__(self, path=None):
for i, tag in enumerate(tags):
if not tag:
continue
if tag.startswith("# "): # a comment!
if tag.startswith("#"): # a comment!
continue
line = content[i]
value = line.split(':', 1)[1].strip()
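
To see what the relaxed comment rule changes, a small standalone sketch of the tag/value parsing (the field names are purely illustrative):

content = ["id: CMS-SUS-12-024", "# a comment", "#also skipped now", "sqrts: 8*TeV"]
tags = [line.split(':', 1)[0].strip() for line in content]
for i, tag in enumerate(tags):
    if not tag:
        continue
    if tag.startswith("#"):   # previously only "# " (hash plus space) was skipped
        continue
    value = content[i].split(':', 1)[1].strip()
    print(tag, "->", value)   # prints: id -> CMS-SUS-12-024, sqrts -> 8*TeV
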
28 changes: 17 additions & 11 deletions smodels/matching/modelTester.py
@@ -178,11 +178,10 @@ def testPoint(inputFile, outputDir, parser, database):
useBest = False
except (NoSectionError, NoOptionError):
pass
try:
expFeatures = parser.getboolean("options", "experimentalFeatures")
runtime._experimental = expFeatures
except (NoSectionError, NoOptionError):
pass

if parser.has_section ( "experimentalFeatures" ):
featuresDict = dict(parser.items("experimentalFeatures"))
setExperimentalFeatures( featuresDict )

allPredictions = theoryPredictionsFor(database, smstoplist,
useBestDataset=useBest,
@@ -530,7 +529,11 @@ def getParameters(parameterFile):
if ret == []:
logger.error("No such file or directory: '%s'" % parameterFile)
sys.exit()
setExperimentalFlag(parser)

if parser.has_section ( "experimentalFeatures" ):
featuresDict = dict(parser.items("experimentalFeatures"))
setExperimentalFeatures(featuresDict)

try:
runtime.modelFile = parser.get("particles", "model")
except:
@@ -544,12 +547,15 @@ def getParameters(parameterFile):
return parser


def setExperimentalFlag(parser):
""" set the experimental flag, if options:experimental = True """
if parser.has_option("options", "experimental"):
if parser.getboolean("options", "experimental"):
runtime._experimental = True
def setExperimentalFeatures(featuresDict):
""" set the experimental features flats, if experimentalFeatures:* = True """

for feature in featuresDict.keys():
if feature not in runtime._experimental:
logger.warning ( f"'{feature}' is not a known experimental feature. will ignore." )
continue
# values read from the ini file are strings, so interpret them explicitly
flag = str(featuresDict[feature]).strip().lower() in ( "true", "1", "yes", "on" )
runtime._experimental[feature] = flag

def getAllInputFiles(inFile):
"""
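
To illustrate how the new section is read, a minimal sketch using the standard-library ConfigParser (the actual parameter-file handling in SModelS may differ slightly; note that the parsed values arrive as strings, which is why setExperimentalFeatures interprets them explicitly):

from configparser import ConfigParser

iniText = """
[experimentalFeatures]
truncatedgaussians = True
spey = False
"""

parser = ConfigParser()
parser.read_string(iniText)
if parser.has_section("experimentalFeatures"):
    featuresDict = dict(parser.items("experimentalFeatures"))
    print(featuresDict)   # {'truncatedgaussians': 'True', 'spey': 'False'}
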
12 changes: 8 additions & 4 deletions smodels/matching/theoryPrediction.py
@@ -12,7 +12,6 @@
from smodels.matching.exceptions import SModelSMatcherError as SModelSError
from smodels.matching import clusterTools
from smodels.base.smodelsLogging import logger
from smodels.statistics.statsTools import StatsComputer
from typing import Union, Text, Dict
import numpy as np

@@ -144,10 +143,12 @@ def setStatsComputer(self):
to define a statistical computer (upper limit result or no expected
upper limits), set the computer to 'N/A'.
"""
from smodels.statistics.statsTools import getStatsComputerModule
StatsComputer = getStatsComputerModule()

if self.dataType() == "upperLimit":
from smodels.base.runtime import experimentalFeatures
if not experimentalFeatures():
from smodels.base.runtime import experimentalFeature
if not experimentalFeature( "truncatedGaussians" ):
computer = 'N/A'
else:
computer = StatsComputer.forTruncatedGaussian(self)
@@ -373,7 +374,8 @@ def computeStatistics(self, expected=False):
self.cachedObjs[expected]["nll_sm"] = llhdDict["lsm"]
self.cachedObjs[expected]["nllmax"] = llhdDict["lmax"]
self.cachedObjs[expected]["muhat"] = llhdDict["muhat"]
self.cachedObjs[expected]["sigma_mu"] = llhdDict["sigma_mu"]
if "sigma_mu" in llhdDict:
self.cachedObjs[expected]["sigma_mu"] = llhdDict["sigma_mu"]


class TheoryPredictionsCombiner(TheoryPrediction):
@@ -522,6 +524,8 @@ def setStatsComputer(self):
if any(tp.statsComputer == 'N/A' for tp in self.theoryPredictions):
computer = 'N/A'
else:
from smodels.statistics.statsTools import getStatsComputerModule
StatsComputer = getStatsComputerModule()
computer = StatsComputer.forAnalysesComb(self.theoryPredictions, self.deltas_rel)

self._statsComputer = computer
4 changes: 2 additions & 2 deletions smodels/share/BANNER
@@ -8,7 +8,7 @@
SModelS -- A tool for interpreting simplified-model results from the LHC,
see https://smodels.github.io/.

Copyright (C) 2012-2024 The SModelS collaboration, smodels-users@lists.oeaw.ac.at
Current members: Mohammad Mahdi Altakach, Sabine Kraml, Andre Lessa, Sahana Narashima, Timothee Pascal, Camila Ramos, Humberto Reyes-Gonzalez, Yoxara Villamizar, Wolfgang Waltenberger
Copyright (C) 2012-2025 The SModelS collaboration, smodels-users@lists.oeaw.ac.at
Current members: Mohammad Mahdi Altakach, Sabine Kraml, Andre Lessa, Sahana Narashima, Timothee Pascal, Camila Ramos, Humberto Reyes-Gonzalez, Theo Reymermier, Yoxara Villamizar, Wolfgang Waltenberger

Previously involved in SModelS: Gael Alguero, Federico Ambrogi, Juhi Dutta, Jan Heisig, Charanjit K. Khosa, Suchita Kulkarni, Ursula Laa, Veronika Magerl, Wolfgang Magerl, Philipp Neuhuber, Doris Proschofsky, Jory Sonneveld, Michael Traub, Matthias Wolf, Alicia Wongel
8 changes: 4 additions & 4 deletions smodels/statistics/basicStats.py
@@ -23,10 +23,10 @@ def CLsfromNLL(
compute the CLs - alpha from the NLLs
TODO: following needs explanation
:param nllA:
:param nll0A:
:param nll:
:param nll0:
:param nllA: negative log likelihood for Asimov data
:param nll0A: negative log likelihood at muhat for Asimov data
:param nll: negative log likelihood
:param nll0: negative log likelihood at muhat
:param return_type: (Text) can be "CLs-alpha", "1-CLs", "CLs" \
CLs-alpha: returns CLs - 0.05 \
1-CLs: returns 1-CLs value \
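
For reference, a hedged sketch of how CLs is commonly obtained from these four NLL values in the asymptotic approximation (standard q_mu-based formulae; the actual basicStats implementation may differ in details such as clipping and the return_type handling, and scipy is assumed to be available):

from math import sqrt
from scipy.stats import norm

def CLsFromNLLSketch(nllA, nll0A, nll, nll0):
    # test statistic: twice the NLL difference between the tested signal
    # strength and the best-fit point (muhat), clipped at zero
    qmu  = max(0.0, 2.0 * (nll - nll0))     # observed data
    qmuA = max(0.0, 2.0 * (nllA - nll0A))   # Asimov data
    CLsb = 1.0 - norm.cdf(sqrt(qmu))                # p-value under signal+background
    CLb  = 1.0 - norm.cdf(sqrt(qmu) - sqrt(qmuA))   # p-value under background only
    return CLsb / CLb if CLb > 0 else None          # CLs = CLsb / CLb
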