
Commit

Merge branch 'main' of github.com:tanganke/fusion_bench
tanganke committed Nov 20, 2024
2 parents 4afdfab + 0b5c285 commit f40d99b
Showing 2 changed files with 2 additions and 20 deletions.
18 changes: 0 additions & 18 deletions fusion_bench/compat/method/__init__.py
@@ -16,35 +16,17 @@ class AlgorithmFactory:
"clip_finetune": ".classification.clip_finetune.ImageClassificationFineTuningForCLIP",
# analysis
# model merging methods
"simple_average": ".simple_average.SimpleAverageAlgorithm",
"weighted_average": ".weighted_average.weighted_average.WeightedAverageAlgorithm",
"weighted_average_for_llama": ".weighted_average.llama.WeightedAverageForLLama",
"task_arithmetic": ".task_arithmetic.TaskArithmeticAlgorithm",
"ties_merging": ".ties_merging.ties_merging.TiesMergingAlgorithm",
"clip_task_wise_adamerging": ".adamerging.clip_task_wise_adamerging.CLIPTaskWiseAdaMergingAlgorithm",
"clip_layer_wise_adamerging": ".adamerging.clip_layer_wise_adamerging.CLIPLayerWiseAdaMergingAlgorithm",
"singular_projection_merging": "fusion_bench.method.smile_upscaling.singular_projection_merging.SingularProjectionMergingAlgorithm",
"pwe_moe_ls_for_clip": ".pwe_moe.clip_pwe_moe.PWEMoELinearScalarizationForCLIP",
"pwe_moe_epo_for_clip": ".pwe_moe.clip_pwe_moe.PWEMoExactParetoOptimalForCLIP",
# plug-and-play model merging methods
"clip_concrete_task_arithmetic": ".concrete_subspace.clip_concrete_task_arithmetic.ConcreteTaskArithmeticAlgorithmForCLIP",
"clip_concrete_task_wise_adamerging": ".concrete_subspace.clip_concrete_adamerging.ConcreteTaskWiseAdaMergingForCLIP",
"clip_concrete_layer_wise_adamerging": ".concrete_subspace.clip_concrete_adamerging.ConcreteLayerWiseAdaMergingForCLIP",
# model mixing methods
"depth_upscaling": ".depth_upscaling.DepthUpscalingAlgorithm",
"mixtral_moe_upscaling": ".mixture_of_experts.mixtral_upcycling.MixtralUpscalingAlgorithm",
"mixtral_for_causal_lm_moe_upscaling": ".mixture_of_experts.mixtral_upcycling.MixtralForCausalLMUpscalingAlgorithm",
"mixtral_moe_merging": ".mixture_of_experts.mixtral_merging.MixtralMoEMergingAlgorithm",
"mixtral_for_causal_lm_merging": ".mixture_of_experts.mixtral_merging.MixtralForCausalLMMergingAlgorithm",
"clip_weight_ensembling_moe": ".we_moe.clip_we_moe.CLIPWeightEnsemblingMoEAlgorithm",
"model_recombination": ".model_recombination.ModelRecombinationAlgorithm",
"smile_upscaling": ".smile_upscaling.smile_upscaling.SmileUpscalingAlgorithm",
"sparse_clip_weight_ensembling_moe": "fusion_bench.method.SparseCLIPWeightEnsemblingMoEAlgorithm",
"smile_mistral_upscaling": ".smile_upscaling.smile_mistral_upscaling.SmileMistralUpscalingAlgorithm",
# pruning methods
"magnitude_diff_pruning": ".pruning.MagnitudeDiffPruningAlgorithm",
"magnitude_pruning_for_llama": ".pruning.llama_magnitude_prune.MagnitudePruningForLlama",
"wanda_pruning_for_llama": ".pruning.llama_wanda_prune.WandaPruningForLlama",
}

@staticmethod
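These entries map a short algorithm name to the dotted path of the class that implements it, mixing relative paths (presumably resolved against the method package) with fully qualified ones. The compat AlgorithmFactory most likely imports the target class only when a name is requested; the sketch below illustrates that kind of lazy, importlib-based resolution. The resolve_algorithm helper and BASE_PACKAGE constant are hypothetical illustrations, not part of the fusion_bench API.

```python
# Hypothetical sketch of resolving a name -> dotted-path registry lazily.
# Relative entries (leading ".") are imported against an assumed base package;
# absolute entries are imported as-is. Not fusion_bench's actual implementation.
import importlib

_ALGORITHMS = {
    "simple_average": ".simple_average.SimpleAverageAlgorithm",
    "task_arithmetic": ".task_arithmetic.TaskArithmeticAlgorithm",
}

BASE_PACKAGE = "fusion_bench.method"  # assumed base package for relative paths


def resolve_algorithm(name: str):
    """Import and return the class registered under ``name``."""
    path = _ALGORITHMS[name]
    module_path, _, class_name = path.rpartition(".")
    if path.startswith("."):
        # Relative entry: import the module relative to the base package.
        module = importlib.import_module(module_path, package=BASE_PACKAGE)
    else:
        # Fully qualified entry, e.g. "fusion_bench.method.smile_upscaling...."
        module = importlib.import_module(module_path)
    return getattr(module, class_name)
```

A registry like this keeps importing the package cheap: an algorithm's potentially heavy dependencies are only pulled in when that algorithm is actually looked up and instantiated.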
4 changes: 2 additions & 2 deletions fusion_bench/method/depth_upscaling/depth_upscaling.py
@@ -30,9 +30,9 @@ class DepthUpscalingAlgorithm(BaseAlgorithm):
"layer_indices": "layer_indices",
}

def __init__(self, layer_indices: list, **kwargs):
super().__init__(**kwargs)
def __init__(self, layer_indices: Union[str, List[int]], **kwargs):
self.layer_indices = layer_indices
super().__init__(**kwargs)

@torch.no_grad()
def run(self, modelpool: nn.ModuleList | BaseModelPool) -> nn.ModuleList:
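Two things change here: the annotation is widened from list to Union[str, List[int]] (a string form of the layer specification is presumably parsed elsewhere), and self.layer_indices is now assigned before super().__init__(**kwargs). A common reason for that ordering, given the "layer_indices": "layer_indices" entry in _config_mapping above, is that the base initializer inspects mapped attributes while building its own config. The toy example below illustrates that pattern under this assumption; it is not fusion_bench's actual BaseAlgorithm.

```python
# Toy illustration (not fusion_bench's BaseAlgorithm) of why an attribute may
# need to exist *before* super().__init__() runs: the base class snapshots
# every attribute named in _config_mapping during its own initialization.
from typing import List, Union


class Base:
    _config_mapping = {}

    def __init__(self, **kwargs):
        # Reads subclass attributes at construction time; they must already be set.
        self.config = {
            key: getattr(self, attr) for key, attr in self._config_mapping.items()
        }


class DepthUpscaling(Base):
    _config_mapping = {"layer_indices": "layer_indices"}

    def __init__(self, layer_indices: Union[str, List[int]], **kwargs):
        self.layer_indices = layer_indices  # set first so Base.__init__ can see it
        super().__init__(**kwargs)


print(DepthUpscaling(layer_indices=[0, 1, 1, 2]).config)
# {'layer_indices': [0, 1, 1, 2]}
```

Swapping the two statements back would make getattr fail (or silently record a stale value) in a base class written this way, which is consistent with the reordering in this commit.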
