Insert empty line between suite and alternative branch after def/class by konstin · Pull Request #12294 · astral-sh/ruff
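For context on the diffs below: under `--preview`, this change makes the formatter insert an empty line between a suite that ends in a `def`/`class` and the alternative branch (`else`, `elif`, `except`) that follows it. A minimal before/after sketch, modelled on the meson-python hunk further down; the `sys.version_info` guard is illustrative and not copied from that project:

```python
import importlib.resources
import sys

# Before (stable style): the `else:` follows the function suite directly.
if sys.version_info >= (3, 9):  # illustrative guard
    def read_binary(package: str, resource: str) -> bytes:
        return importlib.resources.files(package).joinpath(resource).read_bytes()
else:
    read_binary = importlib.resources.read_binary

# After (preview style with this PR): an empty line separates the suite that
# ends in a `def`/`class` from the alternative branch keyword.
if sys.version_info >= (3, 9):  # illustrative guard
    def read_binary(package: str, resource: str) -> bytes:
        return importlib.resources.files(package).joinpath(resource).read_bytes()

else:
    read_binary = importlib.resources.read_binary
```

The bokeh, typeshed, langchain, and indico hunks below exercise the same rule for `except`, `elif`, and `else` branches.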
ruff-ecosystem results
Formatter (stable)
ℹ️ ecosystem check encountered format errors. (no format changes; 1 project error)
openai/openai-cookbook (error)
warning: Detected debug build without --no-cache.
error: Failed to parse examples/chatgpt/gpt_actions_library/.gpt_action_getting_started.ipynb:11:1:1: Expected an expression
error: Failed to parse examples/chatgpt/gpt_actions_library/gpt_action_bigquery.ipynb:13:1:1: Expected an expression
Formatter (preview)
ℹ️ ecosystem check detected format changes. (+15 -0 lines in 14 files in 5 projects; 1 project error; 48 projects unchanged)
bokeh/bokeh (+5 -0 lines across 5 files)
ruff format --preview
def yellow(text: str) -> str:
return f"{colorama.Fore.YELLOW}{text}{colorama.Style.RESET_ALL}"
+
except ModuleNotFoundError:
def _plain(text: str) -> str:
src/bokeh/command/subcommands/file_output.py~L193
def indexed(i: int) -> str:
return filename
+
else:
    def indexed(i: int) -> str:
src/bokeh/core/has_props.py~L49
F = TypeVar("F", bound=Callable[..., Any])
def lru_cache(arg: int | None) -> Callable[[F], F]: ...
+
else:
    from functools import lru_cache
src/bokeh/document/locking.py~L93
@wraps(func)
async def _wrapper(*args: Any, **kw: Any) -> None:
await func(*args, **kw)
+
else:
    @wraps(func)
tests/unit/bokeh/core/test_has_props.py~L556
class DupeProps(hp.HasProps):
bar = AngleSpec()
bar_units = String()
+
except RuntimeError as e:
    assert str(e) == "Two property generators both created DupeProps.bar_units"
else:
langchain-ai/langchain (+2 -0 lines across 2 files)
ruff format --preview
libs/community/tests/unit_tests/chat_message_histories/test_sql.py~L10
class Base(DeclarativeBase):
pass
+
except ImportError:  # for sqlalchemy < 2
    from sqlalchemy.ext.declarative import declarative_base
libs/core/langchain_core/messages/utils.py~L765
def list_token_counter(messages: Sequence[BaseMessage]) -> int:
return sum(token_counter(msg) for msg in messages) # type: ignore[arg-type, misc]
+
else:
    list_token_counter = token_counter  # type: ignore[assignment]
python/typeshed (+4 -0 lines across 3 files)
ruff format --preview
if sys.version_info >= (3, 11):
def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ...
+
else:
    def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ...
locals: Mapping[str, Any] | None = None,
eval_str: bool = False,
) -> Self: ...
+
else:
    @classmethod
    def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ...
def __ge__(self, other: Self) -> bool: ...
def __gt__(self, other: Self) -> bool: ...
def __le__(self, other: Self) -> bool: ...
+
else:
    def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ...
    def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ...
def __ge__(self, other: Self) -> bool: ...
def __gt__(self, other: Self) -> bool: ...
def __le__(self, other: Self) -> bool: ...
+
else:
    def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ...
    def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ...
indico/indico (+3 -0 lines across 3 files)
ruff format --preview
indico/modules/events/contributions/util.py~L110
if not c.speakers:
return True, None
return False, speakers[0].get_full_name(last_name_upper=False, abbrev_first_name=False).lower()
+
elif sort_by == BOASortField.board_number:
    key_func = attrgetter('board_number')
elif sort_by == BOASortField.session_board_number:
indico/web/flask/templating.py~L63
if isinstance(item, str):
item = item.lower()
return natural_sort_key(item)
+
else:
    sort_func = natural_sort_key
# Indico RH
def wrapper(**kwargs):
return obj().process()
+
else:
    # Some class we didn't expect.
    raise ValueError(f'Unexpected view func class: {obj!r}')
mesonbuild/meson-python (+1 -0 lines across 1 file)
ruff format --preview
def read_binary(package: str, resource: str) -> bytes:
return importlib.resources.files(package).joinpath(resource).read_bytes()
+
else:
    read_binary = importlib.resources.read_binary
openai/openai-cookbook (error)
ruff format --preview
warning: Detected debug build without --no-cache.
error: Failed to parse examples/chatgpt/gpt_actions_library/.gpt_action_getting_started.ipynb:11:1:1: Expected an expression
error: Failed to parse examples/chatgpt/gpt_actions_library/gpt_action_bigquery.ipynb:13:1:1: Expected an expression