Mercurial > hg > svgui
diff layer/Colour3DPlotLayer.h @ 1105:ea5ae9dd10ba spectrogram-minor-refactor
Convert ColourScaleType into an enum class
| field    | value                           |
|----------|---------------------------------|
| author   | Chris Cannam                    |
| date     | Thu, 14 Jul 2016 16:52:16 +0100 |
| parents  | 46cc4644206d                    |
| children | 6d720fe1c8cc                    |
line wrap: on
line diff
--- a/layer/Colour3DPlotLayer.h	Thu Jul 14 16:12:05 2016 +0100
+++ b/layer/Colour3DPlotLayer.h	Thu Jul 14 16:52:16 2016 +0100
@@ -90,8 +90,8 @@
     virtual void setProperty(const PropertyName &, int value);
     virtual void setProperties(const QXmlAttributes &);
 
-    void setColourScale(ColourScale::Scale);
-    ColourScale::Scale getColourScale() const { return m_colourScale; }
+    void setColourScale(ColourScaleType);
+    ColourScaleType getColourScale() const { return m_colourScale; }
 
     void setColourMap(int map);
     int getColourMap() const;
@@ -163,7 +163,7 @@
     mutable int m_cacheValidStart;
     mutable int m_cacheValidEnd;
 
-    ColourScale::Scale m_colourScale;
+    ColourScaleType m_colourScale;
     bool m_colourScaleSet;
     int m_colourMap;
     float m_gain;
@@ -182,8 +182,8 @@
 
     bool m_synchronous;
 
-    static ColourScale::Scale convertToColourScale(int value);
-    static int convertFromColourScale(ColourScale::Scale);
+    static ColourScaleType convertToColourScale(int value);
+    static int convertFromColourScale(ColourScaleType);
 
     static std::pair<ColumnNormalization, bool> convertToColumnNorm(int value);
     static int convertFromColumnNorm(ColumnNormalization norm, bool visible);