Skip to content

Commit

Permalink
ExLlamaV2 filter
Browse files Browse the repository at this point in the history
Fix comment

Fixed precommit issues

Removed text

Basic draft done

Passed local test

Fixed tests+precommit

Revert change for pyairports

Fixed precommit

Wrap up

Remove | for union

Attempt changing to List

Fixed for 3.8

Adding exllamav2 to optional dependency

Fixed model

Changed to fork

Fix format

Changed order

Skip exllamav2 tests

Attempt fixing coverage

Attempt to fix coverage

Remove flash-attn requirement

Fixed fixture tests

Removed lora

Passed coverage

Added back transformers install

Fixed per review

Made coverage 100%
  • Loading branch information
isamu-isozaki authored and rlouf committed Oct 5, 2024
1 parent 4bf1470 commit 80b82f1
Show file tree
Hide file tree
Showing 8 changed files with 641 additions and 193 deletions.
13 changes: 1 addition & 12 deletions outlines/generate/fsm.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,10 @@

from outlines.fsm.guide import RegexGuide
from outlines.generate.api import (
SequenceGenerator,
SequenceGeneratorAdapter,
VisionSequenceGeneratorAdapter,
)
from outlines.models import ExLlamaV2Model, TransformersVision
from outlines.models import TransformersVision
from outlines.samplers import Sampler, multinomial


Expand All @@ -30,13 +29,3 @@ def fsm_vision(model, fsm: interegular.fsm.FSM, sampler: Sampler = multinomial()
guide = RegexGuide.from_interegular_fsm(fsm, model.tokenizer)
logits_processor = GuideLogitsProcessor(tokenizer=model.tokenizer, guide=guide)
return VisionSequenceGeneratorAdapter(model, logits_processor, sampler)


@fsm.register(ExLlamaV2Model)
def fsm_exllamav2(
    model, fsm: interegular.fsm.FSM, sampler: Sampler = multinomial()
) -> SequenceGenerator:
    """Build an FSM-constrained generator for an ExLlamaV2 model.

    Converts the interegular FSM into a ``RegexGuide`` bound to the model's
    tokenizer, then wraps model, sampler and guide in a ``SequenceGenerator``
    running on the model's device.
    """
    guide = RegexGuide.from_interegular_fsm(fsm, model.tokenizer)
    return SequenceGenerator(guide, model, sampler, model.device)
18 changes: 1 addition & 17 deletions outlines/generate/regex.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,10 @@
from functools import singledispatch

from outlines.fsm.guide import RegexGuide
from outlines.generate.api import (
SequenceGenerator,
SequenceGeneratorAdapter,
VisionSequenceGeneratorAdapter,
)
from outlines.models import ExLlamaV2Model, OpenAI, TransformersVision
from outlines.models import OpenAI, TransformersVision
from outlines.samplers import Sampler, multinomial


Expand Down Expand Up @@ -49,20 +47,6 @@ def regex_vision(
return VisionSequenceGeneratorAdapter(model, logits_processor, sampler)


@regex.register(ExLlamaV2Model)
def regex_exllamav2(
    model,
    regex_str: str,
    sampler: Sampler = multinomial(),
) -> SequenceGenerator:
    """Build a regex-constrained generator for an ExLlamaV2 model.

    Compiles ``regex_str`` into a ``RegexGuide`` using the model's tokenizer
    and returns a ``SequenceGenerator`` that samples on the model's device.
    """
    guide = RegexGuide(regex_str, model.tokenizer)
    return SequenceGenerator(guide, model, sampler, model.device)


@regex.register(OpenAI)
def regex_openai(
model: OpenAI,
Expand Down
11 changes: 1 addition & 10 deletions outlines/generate/text.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,10 @@
from functools import singledispatch

from outlines.fsm.guide import StopAtEOSGuide
from outlines.generate.api import (
SequenceGenerator,
SequenceGeneratorAdapter,
VisionSequenceGeneratorAdapter,
)
from outlines.models import ExLlamaV2Model, OpenAI, TransformersVision
from outlines.models import OpenAI, TransformersVision
from outlines.samplers import Sampler, multinomial


Expand Down Expand Up @@ -36,13 +34,6 @@ def text(model, sampler: Sampler = multinomial()) -> SequenceGeneratorAdapter:
return SequenceGeneratorAdapter(model, None, sampler)


@text.register(ExLlamaV2Model)
def text_exllamav2(model, sampler: Sampler = multinomial()) -> SequenceGenerator:
    """Build an unconstrained text generator for an ExLlamaV2 model.

    Generation is only bounded by a ``StopAtEOSGuide``, which halts the
    sequence once the tokenizer's EOS token is produced.
    """
    guide = StopAtEOSGuide(model.tokenizer)
    return SequenceGenerator(guide, model, sampler, model.device)


@text.register(TransformersVision)
def text_vision(model, sampler: Sampler = multinomial()):
    """Build an unconstrained text generator for a TransformersVision model.

    No logits processor is supplied (``None``), so output is not constrained.
    """
    adapter = VisionSequenceGeneratorAdapter(model, None, sampler)
    return adapter
Expand Down
Loading

1 comment on commit 80b82f1

@psych0v0yager
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why were the LoRAs removed?

Please sign in to comment.