Commit
Merge pull request #28 from microsoft/v0.2.4
v0.2.4
qingyun-wu authored Feb 17, 2021
2 parents d18d292 + 79a851e commit 2d3bd84
Showing 11 changed files with 965 additions and 431 deletions.
16 changes: 11 additions & 5 deletions flaml/automl.py
@@ -402,7 +402,7 @@ def _validate_data(self, X_train_all, y_train_all, dataframe, label,
         self._X_train_all, self._y_train_all = \
             self._transformer.fit_transform(X, y, self._state.task)
         self._label_transformer = self._transformer.label_transformer
-
+        self._sample_weight_full = self._state.fit_kwargs.get('sample_weight')
         if X_val is not None and y_val is not None:
             if not (isinstance(X_val, np.ndarray) or
                     issparse(X_val) or
@@ -446,7 +446,8 @@ def _prepare_data(self,
             self._X_train_all, self._y_train_all
         if issparse(X_train_all):
             X_train_all = X_train_all.tocsr()
-        if self._state.task != 'regression':
+        if self._state.task != 'regression' and self._state.fit_kwargs.get(
+                'sample_weight') is None:
             # logger.info(f"label {pd.unique(y_train_all)}")
             label_set, counts = np.unique(y_train_all, return_counts=True)
             # augment rare classes
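
The guard added in this hunk matters because the rare-class augmentation below it duplicates training rows, which would silently desynchronize a user-supplied weight vector from X and y. A simplified, illustrative sketch of the kind of augmentation being skipped (variable names follow the hunk; the duplication threshold is invented, and flaml's real logic differs in detail):

```python
# Illustrative sketch only: duplicating rare-class rows without also
# duplicating sample_weight entries would misalign X/y with the weights,
# which is why the new check skips this path when sample_weight is given.
import numpy as np

def augment_rare_classes(X, y, min_count=20):
    label_set, counts = np.unique(y, return_counts=True)
    for label, count in zip(label_set, counts):
        if count < min_count:
            idx = np.where(y == label)[0]
            rep = np.tile(idx, min_count // count)  # duplicate rare rows
            X = np.vstack([X, X[rep]])
            y = np.concatenate([y, y[rep]])
    return X, y
```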
@@ -1093,8 +1094,9 @@ def _search(self):
                             self._state.best_loss))
             else:
                 logger.info(f"no enough budget for learner {estimator}")
-                self.estimator_list.remove(estimator)
-                self._estimator_index -= 1
+                if self._estimator_index is not None:
+                    self.estimator_list.remove(estimator)
+                    self._estimator_index -= 1
             if self._retrain_full and best_config_sig and not better and (
                     self._search_states[self._best_estimator].sample_size ==
                     self._state.data_size) and (est_retrain_time <=
@@ -1151,7 +1153,11 @@ def _search(self):
                 stacker = Stacker(estimators, best_m,
                                   n_jobs=self._state.n_jobs,
                                   passthrough=True)
-                stacker.fit(self._X_train_all, self._y_train_all)
+                if self._sample_weight_full is not None:
+                    self._state.fit_kwargs[
+                        'sample_weight'] = self._sample_weight_full
+                stacker.fit(self._X_train_all, self._y_train_all,
+                            **self._state.fit_kwargs)
                 logger.info(f'ensemble: {stacker}')
                 self._trained_estimator = stacker
                 self._trained_estimator.model = stacker
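
Taken together, the automl.py hunks above let a user-supplied `sample_weight` survive into the final ensemble fit instead of being dropped. A minimal sketch of the resulting user-facing behavior (the dataset and weights below are invented for illustration; only the `fit` keywords mirror this diff):

```python
# Illustrative sketch, not part of the diff: sample_weight passed via
# fit(**fit_kwargs) is now stored and reused when the ensemble stacker is fit.
import numpy as np
from sklearn.datasets import make_classification
from flaml import AutoML

X, y = make_classification(n_samples=200, random_state=0)
weights = np.random.default_rng(0).uniform(0.5, 1.5, size=len(y))  # made-up weights

automl = AutoML()
automl.fit(X, y, task='classification', time_budget=10,
           ensemble=True,           # exercises the stacker path patched above
           sample_weight=weights)   # captured as self._sample_weight_full
```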
4 changes: 2 additions & 2 deletions flaml/searcher/flow2.py
@@ -121,8 +121,8 @@ def _init_search(self):
         self._unordered_cat_hp = {}
         self._cat_hp_cost = {}
         for key, domain in self.space.items():
-            assert not isinstance(domain, dict), \
-                key+"'s domain is grid search which is not supported in FLOW2."
+            assert not (isinstance(domain, dict) and 'grid_search' in domain
+                        ), key+"'s domain is grid search which is not supported in FLOW2."
             if callable(getattr(domain, 'get_sampler', None)):
                 self._tunable_keys.append(key)
                 sampler = domain.get_sampler()
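
For context on this relaxation: ray tune's `grid_search` helper expands to the plain dict `{'grid_search': [...]}`. The old assertion rejected every dict-valued domain, including legitimate nested search spaces; the new one rejects only genuine grid-search specs. A small illustrative check (not from the repo):

```python
# Illustrative only: what FLOW2's relaxed assertion now distinguishes.
from ray import tune as raytune

grid = raytune.grid_search([1e-3, 1e-2])  # expands to {'grid_search': [0.001, 0.01]}
assert isinstance(grid, dict) and 'grid_search' in grid  # still rejected by FLOW2

nested = {'sub_param': raytune.uniform(0, 1)}  # dict-valued, but not a grid spec
assert 'grid_search' not in nested             # no longer rejected
```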
3 changes: 3 additions & 0 deletions flaml/tune/README.md
@@ -6,6 +6,7 @@ The API is compatible with ray tune.
 Example:
 
 ```python
+# require: pip install flaml[blendsearch]
 from flaml import tune
 import time
 
@@ -42,6 +43,7 @@ print(analysis.best_config) # the best config
 
 Or, using ray tune's API:
 ```python
+# require: pip install flaml[blendsearch] ray[tune]
 from ray import tune as raytune
 from flaml import CFO, BlendSearch
 import time
@@ -146,6 +148,7 @@ based on optimism in face of uncertainty.
 Example:
 
 ```python
+# require: pip install flaml[blendsearch]
 from flaml import BlendSearch
 tune.run(...
     search_alg = BlendSearch(points_to_evaluate=[init_config]),
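
Since the README shows that last snippet only in truncated form, here is one way the full call might look; the objective function and search space are invented for illustration, and only the `search_alg=BlendSearch(points_to_evaluate=[init_config])` wiring comes from the README:

```python
# Illustrative expansion of the truncated README snippet; requires
# pip install flaml[blendsearch]. The objective and space are made up.
from flaml import tune
from flaml import BlendSearch

def evaluate(config):
    tune.report(metric=(config['x'] - 3) ** 2)  # toy objective to minimize

init_config = {'x': 1.0}
analysis = tune.run(
    evaluate,
    config={'x': tune.uniform(-10, 10)},
    metric='metric', mode='min',
    num_samples=20,
    search_alg=BlendSearch(points_to_evaluate=[init_config]),
)
print(analysis.best_config)  # the best config found
```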
2 changes: 1 addition & 1 deletion flaml/version.py
@@ -1 +1 @@
-__version__ = "0.2.3"
+__version__ = "0.2.4"
29 changes: 19 additions & 10 deletions notebook/finetune_transformer_demo.ipynb
@@ -6,11 +6,16 @@
    "source": [
     "This notebook uses the Huggingface transformers library to finetune a transformer model.\n",
     "\n",
-    "**Requirements.** This notebook has additional requirements:\n",
-    "\n",
-    "```bash\n",
-    "pip install -r transformers_requirements.txt\n",
-    "```"
+    "**Requirements.** This notebook has additional requirements:"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "!pip install torch transformers datasets ipywidgets"
+   ]
+  },
   {
@@ -699,7 +704,7 @@
    "source": [
     "### Step 3. Launch with `flaml.tune.run`\n",
     "\n",
-    "We are now ready to laungh the tuning using `flaml.tune.run`:"
+    "We are now ready to launch the tuning using `flaml.tune.run`:"
    ],
    "cell_type": "markdown",
    "metadata": {}
@@ -766,9 +771,13 @@
   ],
   "metadata": {
    "kernelspec": {
-    "display_name": "flaml",
-    "language": "python",
-    "name": "flaml"
+    "name": "python3",
+    "display_name": "Python 3.7.7 64-bit ('flaml': conda)",
+    "metadata": {
+     "interpreter": {
+      "hash": "bfcd9a6a9254a5e160761a1fd7a9e444f011592c6770d9f4180dde058a9df5dd"
+     }
+    }
   },
   "language_info": {
    "codemirror_mode": {
@@ -780,7 +789,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.7.6"
+   "version": "3.7.7-final"
   },
  },
  "nbformat": 4,