Datasets:
| text (string, 558–4.54k chars) | prefix (string, 100–2k chars) | middle (string, 10–500 chars) | suffix (string, 100–2k chars) | type (2 classes) |
|---|---|---|---|---|
<|fim_prefix|>P_SPACE && word->word->space() > 0 &&
!word->word->flag(W_FUZZY_NON) && !word->word->flag(W_FUZZY_SP)))) {
if (!word->word->flag(W_BOL) && word->word->space() > 0 && !word->word->flag(W_FUZZY_NON) &&
!word->word->flag(W_FUZZY_SP)) {
/* Write a space to separate fr... | P_SPACE && word->word->space() > 0 &&
!word->word->flag(W_FUZZY_NON) && !word->word->flag(W_FUZZY_SP)))) {
if (!word->word->flag(W_BOL) && word->word->space() > 0 && !word->word->flag(W_FUZZY_NON) &&
!word->word->flag(W_FUZZY_SP)) {
/* Write a space to separate from preceding g... | uni_ch = kLatinChs[j]; |
break;
}
}
if (uni_ch <= 0xff) {
*ptr++ = static_cast<char>(uni_ch);
last_char_was_tilde = false;
} else {
*ptr++ = kUNLVReject;
last_char_was_tilde = true;
}
}
}
... | ast_based |
<|fim_prefix|>_key_frame");
undo_redo->add_undo_method(ape, "_animation_update_key_frame");
}
undo_redo->commit_action();
//selection.clear();
}
}
void AnimationBezierTrackEdit::_bezier_track_insert_key_at_anim(const Ref<Animation> &p_anim, int p_track, double p_time, real_t p_value, const Vector2 &p_in_han... | _key_frame");
undo_redo->add_undo_method(ape, "_animation_update_key_frame");
}
undo_redo->commit_action();
//selection.clear();
}
}
void AnimationBezierTrackEdit::_bezier_track_insert_key_at_anim(const Ref<Animation> &p_anim, int p_track, double p_time, real_t p_value, const Vector2 &p_in_handle, const Vec... | callable_mp(this, &AnimationBezierTrackEdit::_zoom_callback) | );
play_position = memnew(Control);
play_position->set_mouse_filter(MOUSE_FILTER_PASS);
add_child(play_position);
play_position->set_anchors_and_offsets_preset(PRESET_FULL_RECT);
play_position->connect(SceneStringName(draw), callable_mp(this, &AnimationBezierTrackEdit::_play_position_draw));
set_focus_mode(FOCUS... | ast_based |
<|fim_prefix|>s of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR P... | s of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON... | if (is_hdr) {
target_format = Image::FORMAT_ASTC_8x8_HDR;
} | else {
target_format = Image::FORMAT_ASTC_8x8;
}
block_x = 8;
block_y = 8;
}
// Compress image data and (if required) mipmaps.
const bool has_mipmaps = r_img->has_mipmaps();
int width = r_img->get_width();
int height = r_img->get_height();
int required_width = (width % block_x) != 0 ? width + (block_x ... | ast_based |
<|fim_prefix|>
AccessibilityElement *ae = rid_owner.get_or_null(p_id);
ERR_FAIL_NULL(ae);
AccessibilityElement *other_ae = rid_owner.get_or_null(p_group_id);
ERR_FAIL_NULL(other_ae);
ERR_FAIL_COND(other_ae->window_id != ae->window_id);
_ensure_node(p_id, ae);
accesskit_node_set_member_of(ae->node, (accesskit_no... |
AccessibilityElement *ae = rid_owner.get_or_null(p_id);
ERR_FAIL_NULL(ae);
AccessibilityElement *other_ae = rid_owner.get_or_null(p_group_id);
ERR_FAIL_NULL(other_ae);
ERR_FAIL_COND(other_ae->window_id != ae->window_id);
_ensure_node(p_id, ae);
accesskit_node_set_member_of(ae->node, (accesskit_node_id)p_group_... |
switch (p_live) {
case DisplayServer::AccessibilityLiveMode::LIVE_OFF: {
accesskit_node_set_live(ae->node, ACCESSKIT_LIVE_OFF);
} break;
case DisplayServer::AccessibilityLiveMode::LIVE_POLITE: {
accesskit_node_set_live(ae->node, ACCESSKIT_LIVE_POLITE);
} break;
case DisplayServer::AccessibilityLiveMod... | } break;
}
}
void AccessibilityDriverAccessKit::accessibility_update_add_action(const RID &p_id, DisplayServer::AccessibilityAction p_action, const Callable &p_callable) {
ERR_FAIL_COND_MSG(!in_accessibility_update, "Accessibility updates are only allowed inside the NOTIFICATION_ACCESSIBILITY_UPDATE notification."... | random |
<|fim_prefix|>>GetScaledYResolution(), rect_left_, rect_top_,
rect_width_, rect_height_);
}
/**
* Get a reading-order iterator to the results of LayoutAnalysis and/or
* Recognize. The returned iterator must be deleted after use.
* WARNING! This class points to data held within the Tes... | >GetScaledYResolution(), rect_left_, rect_top_,
rect_width_, rect_height_);
}
/**
* Get a reading-order iterator to the results of LayoutAnalysis and/or
* Recognize. The returned iterator must be deleted after use.
* WARNING! This class points to data held within the TessBaseAPI class... | if (it->Empty(RIL_PARA)) {
continue;
} |
auto block_type = it->BlockType();
switch (block_type) {
case PT_FLOWING_IMAGE:
case PT_HEADING_IMAGE:
case PT_PULLOUT_IMAGE:
case PT_HORZ_LINE:
case PT_VERT_LINE:
// Ignore images and lines for text output.
continue;
case PT_NOISE:
tprintf("TODO: Ple... | ast_based |
<|fim_prefix|> (fgets(pagename, sizeof(pagename), flist) == nullptr) {
break;
}
} else {
if (page >= lines.size()) {
break;
}
snprintf(pagename, sizeof(pagename), "%s", lines[page].c_str());
}
chomp_string(pagename);
Pix *pix = pixRead(pagename);
if (pix == nu... | (fgets(pagename, sizeof(pagename), flist) == nullptr) {
break;
}
} else {
if (page >= lines.size()) {
break;
}
snprintf(pagename, sizeof(pagename), "%s", lines[page].c_str());
}
chomp_string(pagename);
Pix *pix = pixRead(pagename);
if (pix == nullptr) {
... | if (tessedit_page_number >= 0) {
break;
} |
if (!offset) {
break;
}
}
return true;
}
// Master ProcessPages calls ProcessPagesInternal and then does any post-
// processing required due to being in a training mode.
bool TessBaseAPI::ProcessPages(const char *filename, const char *retry_config, int timeout_millisec,
... | ast_based |
<|fim_prefix|> text_buf.draw(get_canvas_item(), string_pos, cc);
float icon_start_height = vofs + rect.size.y / 2.0;
Rect2 remove_rect = Rect2(remove_hpos, icon_start_height - remove->get_height() / 2.0, remove->get_width(), remove->get_height());
if (read_only) {
draw_texture(remove, remove_r... | text_buf.draw(get_canvas_item(), string_pos, cc);
float icon_start_height = vofs + rect.size.y / 2.0;
Rect2 remove_rect = Rect2(remove_hpos, icon_start_height - remove->get_height() / 2.0, remove->get_width(), remove->get_height());
if (read_only) {
draw_texture(remove, remove_rect.position, ... |
while (scale / timeline_v_zoom < min_left_scale * 2) { | scale += step;
}
bool first = true;
int prev_iv = 0;
for (int i = font->get_height(font_size); i < get_size().height; i++) {
float ofs = get_size().height / 2.0 - i;
ofs *= timeline_v_zoom;
ofs += timeline_v_scroll;
int iv = int(ofs / scale);
if (ofs < 0) {
iv -= 1;... | random |
<|fim_prefix|>R_FAIL_COND_MSG(!in_accessibility_update, "Accessibility updates are only allowed inside the NOTIFICATION_ACCESSIBILITY_UPDATE notification.");
AccessibilityElement *ae = rid_owner.get_or_null(p_id);
ERR_FAIL_NULL(ae);
AccessibilityElement *other_ae = rid_owner.get_or_null(p_related_id);
ERR_FAIL_NUL... | R_FAIL_COND_MSG(!in_accessibility_update, "Accessibility updates are only allowed inside the NOTIFICATION_ACCESSIBILITY_UPDATE notification.");
AccessibilityElement *ae = rid_owner.get_or_null(p_id);
ERR_FAIL_NULL(ae);
AccessibilityElement *other_ae = rid_owner.get_or_null(p_related_id);
ERR_FAIL_NULL(other_ae);
... | rid_owner.get_or_null(p_id) | ;
ERR_FAIL_NULL(ae);
AccessibilityElement *other_ae = rid_owner.get_or_null(p_other_id);
ERR_FAIL_NULL(other_ae);
ERR_FAIL_COND(other_ae->window_id != ae->window_id);
_ensure_node(p_id, ae);
accesskit_node_set_previous_on_line(ae->node, (accesskit_node_id)p_other_id.get_id());
}
void AccessibilityDriverAccessKi... | ast_based |
<|fim_prefix|>
#include "android_keys_utils.h"
#include "display_server_android.h"
void AndroidInputHandler::process_joy_event(AndroidInputHandler::JoypadEvent p_event) {
switch (p_event.type) {
case JOY_EVENT_BUTTON:
Input::get_singleton()->joy_button(p_event.device, (JoyButton)p_event.index, p_event.pressed);
... |
#include "android_keys_utils.h"
#include "display_server_android.h"
void AndroidInputHandler::process_joy_event(AndroidInputHandler::JoypadEvent p_event) {
switch (p_event.type) {
case JOY_EVENT_BUTTON:
Input::get_singleton()->joy_button(p_event.device, (JoyButton)p_event.index, p_event.pressed);
break;
ca... | keycode = Key::KEY_DELETE; | } else {
keycode = fix_keycode(unicode, physical_keycode);
}
switch (physical_keycode) {
case Key::SHIFT: {
shift_mem = p_pressed;
} break;
case Key::ALT: {
alt_mem = p_pressed;
} break;
case Key::CTRL: {
control_mem = p_pressed;
} break;
case Key::META: {
meta_mem = p_pressed;
} break... | random |
<|fim_prefix|>
for (int i = 0; i < track_count; ++i) {
if (animation->track_get_type(i) != Animation::TrackType::TYPE_BEZIER || hidden_tracks.has(i) || locked_tracks.has(i)) {
continue;
}
float track_h = animation->bezier_track_interpolate(i, time);
float track_height = _bezier_h_to_pixel(track... |
for (int i = 0; i < track_count; ++i) {
if (animation->track_get_type(i) != Animation::TrackType::TYPE_BEZIER || hidden_tracks.has(i) || locked_tracks.has(i)) {
continue;
}
float track_h = animation->bezier_track_interpolate(i, time);
float track_height = _bezier_h_to_pixel(track_h);
if (... | amr.track = E->get().first; | amr.time = newtime;
to_restore.push_back(amr);
to_restore_handle_modes.push_back(animation->bezier_track_get_key_handle_mode(E->get().first, idx));
}
// 3 - Move the keys (re-insert them).
for (SelectionSet::Element *E = selection.back(); E; E = E->prev()) {
real_t newpos = animation->... | random |
<|fim_prefix|> = [curl, &curlcode](const char *function) {
fprintf(stderr, "Error, %s failed with error %s\n", function, curl_easy_strerror(curlcode));
curl_easy_cleanup(curl);
return false;
};
curlcode = curl_easy_setopt(curl, CURLOPT_URL, filename);
if (curlcode != CURLE_OK) ... | = [curl, &curlcode](const char *function) {
fprintf(stderr, "Error, %s failed with error %s\n", function, curl_easy_strerror(curlcode));
curl_easy_cleanup(curl);
return false;
};
curlcode = curl_easy_setopt(curl, CURLOPT_URL, filename);
if (curlcode != CURLE_OK) {
retu... | {
return error("curl_easy_setopt");
} |
curlcode = curl_easy_perform(curl);
if (curlcode != CURLE_OK) {
return error("curl_easy_perform");
}
curl_easy_cleanup(curl);
data = reinterpret_cast<const l_uint8 *>(buf.data());
}
#else
fprintf(stderr, "Error, this tesseract has no URL support\n");
return false;
#end... | ast_based |
<|fim_prefix|> */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* ... | */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The... | if (p_keycode != Key::META) {
ev->set_meta_pressed(meta_mem);
}
if (p_keycode != Key::CTRL) {
ev->set_ctrl_pressed(control_mem);
} |
}
void AndroidInputHandler::process_key_event(int p_physical_keycode, int p_unicode, int p_key_label, bool p_pressed, bool p_echo) {
static char32_t prev_wc = 0;
char32_t unicode = p_unicode;
if ((p_unicode & 0xfffffc00) == 0xd800) {
if (prev_wc != 0) {
ERR_PRINT("invalid utf16 surrogate input");
}
prev_w... | ast_based |
<|fim_prefix|> ggml_context * ctx = ctx_for_buft(buft);
if (!ctx) {
LLAMA_LOG_ERROR("%s: failed to allocate context for control vector\n", __func__);
return false;
}
ggml_tensor * tensor = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hparams.n_embd);
tensors.push... | ggml_context * ctx = ctx_for_buft(buft);
if (!ctx) {
LLAMA_LOG_ERROR("%s: failed to allocate context for control vector\n", __func__);
return false;
}
ggml_tensor * tensor = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, hparams.n_embd);
tensors.push_back(tensor);... | }
return true;
} |
// lora
llama_adapter_lora_weight * llama_adapter_lora::get_weight(ggml_tensor * w) {
const std::string name(w->name);
const auto pos = ab_map.find(name);
if (pos != ab_map.end()) {
return &pos->second;
}
return nullptr;
}
static void llama_adapter_lora_init_impl(llama_model & model, co... | random |
<|fim_prefix|> (int)getRMSState() + (int)mCoverageQualityState;
return rating == 4;
}
bool calib::calibController::getFramesNumberState() const
{
return std::max(mCalibData->imagePoints.size(), mCalibData->allCharucoCorners.size()) > mMinFramesNum;
}
bool calib::calibController::getConfidenceIntrer... | (int)getRMSState() + (int)mCoverageQualityState;
return rating == 4;
}
bool calib::calibController::getFramesNumberState() const
{
return std::max(mCalibData->imagePoints.size(), mCalibData->allCharucoCorners.size()) > mMinFramesNum;
}
bool calib::calibController::getConfidenceIntrervalsState() co... | return mean.at<double>(0) / (stdDev.at<double>(0) + 1e-7);
} | }
calib::calibDataController::calibDataController(cv::Ptr<calib::calibrationData> data, int maxFrames, double convParameter) :
mCalibData(data), mParamsFileName("CamParams.xml")
{
mMaxFramesNum = maxFrames;
mAlpha = convParameter;
}
calib::calibDataController::calibDataController()
{
}
void calib::calib... | random |
<|fim_prefix|>/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by appli... | /* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or a... | return "Miscellaneous";
case ActivityCategory::kDatasetOp:
return "Dataset Op";
case ActivityCategory::kTpuOp:
return "TPU Op";
case ActivityCategory::kRendezvous:
return "Rendezvous"; | }
}
// An activity to be recorded.
struct Activity {
using Attributes = absl::flat_hash_map<tsl::string, tsl::string>;
// A human readable title of the activity.
tsl::string title;
// The category of the activity.
ActivityCategory category = ActivityCategory::kMisc;
// Key/value pairs that are attached t... | random |
<|fim_prefix|> const bool kRegistered = method.channel_tag() && context->authority().empty();
grpc_call* c_call = nullptr;
if (kRegistered) {
c_call = grpc_channel_create_registered_call(
c_channel_, context->propagate_from_call_,
context->propagation_options_.c_bitmask(), cq->cq(),
met... | const bool kRegistered = method.channel_tag() && context->authority().empty();
grpc_call* c_call = nullptr;
if (kRegistered) {
c_call = grpc_channel_create_registered_call(
c_channel_, context->propagate_from_call_,
context->propagation_options_.c_bitmask(), cq->cq(),
method.channel_ta... | grpc::internal::Call* call) {
ops->FillOps(
call); // Make a copy of call. It's fine since Call just has pointers
}
void* Channel::RegisterMethod(const char* method) {
return grpc_channel_register_call(
c_channel_, method, host_.empty() ? nullptr : host_.c_str(), nullptr... |
grpc_connectivity_state Channel::GetState(bool try_to_connect) {
return grpc_channel_check_connectivity_state(c_channel_, try_to_connect);
}
namespace {
class TagSaver final : public grpc::internal::CompletionQueueTag {
public:
explicit TagSaver(void* tag) : tag_(tag) {}
~TagSaver() override {}
bool Finaliz... | random |
<|fim_prefix|>
int selected_track = 0;
Vector<Rect2> view_rects;
Ref<Texture2D> bezier_icon;
Ref<Texture2D> bezier_handle_icon;
Ref<Texture2D> selected_icon;
RBMap<int, Rect2> subtracks;
enum {
REMOVE_ICON,
LOCK_ICON,
SOLO_ICON,
VISIBILITY_ICON
};
RBMap<int, RBMap<int, Rect2>> subtrack_icons;
Has... |
int selected_track = 0;
Vector<Rect2> view_rects;
Ref<Texture2D> bezier_icon;
Ref<Texture2D> bezier_handle_icon;
Ref<Texture2D> selected_icon;
RBMap<int, Rect2> subtracks;
enum {
REMOVE_ICON,
LOCK_ICON,
SOLO_ICON,
VISIBILITY_ICON
};
RBMap<int, RBMap<int, Rect2>> subtrack_icons;
HashSet<int> lock... | {
int32_t hash = 23;
hash = hash * 31 * hash_one_uint64(p_value.first); |
hash = hash * 31 * hash_one_uint64(p_value.second);
return hash;
}
};
HashMap<Pair<int, int>, Vector2, PairHasher> additional_moving_handle_lefts;
HashMap<Pair<int, int>, Vector2, PairHasher> additional_moving_handle_rights;
void _clear_selection();
void _clear_selection_for_anim(const Ref<Animation> &p... | ast_based |
<|fim_prefix|>ling_selection_pivot.y) * (scaling_selection_scale.y - 1);
}
}
if (moving_inserted_key && moving_selection_from_track == p_track) {
if (moving_selection_from_key == i) {
Animation::HandleMode handle_mode = animation->bezier_track_get_key_handle_mode(p_track, i);
if (handle_mode != Anima... | ling_selection_pivot.y) * (scaling_selection_scale.y - 1);
}
}
if (moving_inserted_key && moving_selection_from_track == p_track) {
if (moving_selection_from_key == i) {
Animation::HandleMode handle_mode = animation->bezier_track_get_key_handle_mode(p_track, i);
if (handle_mode != Animation::HANDLE_M... | int j = from_x; | j <= to_x; j++) {
float t = (j - limit) / scale + timeline->get_value();
float h;
if (j == point_end) {
h = end.y; // Make sure it always connects.
} else if (j == point_start) {
h = start.y; // Make sure it always connects.
} else { // Custom interpolation, used because it needs to show paths... | ast_based |
<|fim_prefix|> }
Ref<Texture2D> t;
if (animation) {
t = frames->get_frame_texture(animation, frame);
}
if (t.is_null()) {
return Rect2();
}
Size2 s = t->get_size();
Point2 ofs = offset;
if (centered) {
ofs -= s / 2;
}
if (s == Size2(0, 0)) {
s = Size2(1, 1);
}
return Rect2(ofs, s);
}
void Anima... | }
Ref<Texture2D> t;
if (animation) {
t = frames->get_frame_texture(animation, frame);
}
if (t.is_null()) {
return Rect2();
}
Size2 s = t->get_size();
Point2 ofs = offset;
if (centered) {
ofs -= s / 2;
}
if (s == Size2(0, 0)) {
s = Size2(1, 1);
}
return Rect2(ofs, s);
}
void AnimatedSprite2D::_... | ERR_FAIL_COND(ae.is_null()); |
Rect2 dst_rect = _get_rect();
DisplayServer::get_singleton()->accessibility_update_set_role(ae, DisplayServer::AccessibilityRole::ROLE_IMAGE);
DisplayServer::get_singleton()->accessibility_update_set_transform(ae, get_transform());
DisplayServer::get_singleton()->accessibility_update_set_bounds(ae, dst_re... | random |
<|fim_prefix|>/**************************************************************************/
/* engine.h */
/**************************************************************************/
/* This file is part of: *... | /**************************************************************************/
/* engine.h */
/**************************************************************************/
/* This file is part of: */
/* ... | /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DA... | /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#pragma once
#include "core/os/main_loop.h"
#include "core/string/ustring.h"
#include "... | random |
<|fim_prefix|> handle_mode,
handle_set_mode);
}
// 4 - (undo) Remove inserted keys.
for (SelectionSet::Element *E = selection.back(); E; E = E->prev()) {
real_t newpos = animation->track_get_key_time(E->get().first, E->get().second) + moving_selection_offset.x;
undo_redo->add_undo_meth... | handle_mode,
handle_set_mode);
}
// 4 - (undo) Remove inserted keys.
for (SelectionSet::Element *E = selection.back(); E; E = E->prev()) {
real_t newpos = animation->track_get_key_time(E->get().first, E->get().second) + moving_selection_offset.x;
undo_redo->add_undo_method(animation.p... | {
real_t oldpos = animation->track_get_key_time(E->get().first, E->get().second); |
real_t newpos = oldpos + moving_selection_offset.x;
undo_redo->add_do_method(this, "_select_at_anim", animation, E->get().first, newpos, i == 0);
undo_redo->add_undo_method(this, "_select_at_anim", animation, E->get().first, oldpos, i == 0);
i++;
}
AnimationPlayerEditor *ape = AnimationPl... | ast_based |
<|fim_prefix|>
lc.a *= 0.5;
draw_line(Point2(limit, i), Point2(right_limit, i), lc, Math::round(EDSCALE));
Color c = color;
c.a *= 0.5;
draw_string(font, Point2(limit + 8, i - 2), TS->format_number(rtos(Math::snapped((iv + 1) * scale, step))), HORIZONTAL_ALIGNMENT_LEFT, -1, font_size, c);
... |
lc.a *= 0.5;
draw_line(Point2(limit, i), Point2(right_limit, i), lc, Math::round(EDSCALE));
Color c = color;
c.a *= 0.5;
draw_string(font, Point2(limit + 8, i - 2), TS->format_number(rtos(Math::snapped((iv + 1) * scale, step))), HORIZONTAL_ALIGNMENT_LEFT, -1, font_size, c);
}
f... | {
offset += moving_selection_offset.x;
value += moving_selection_offset.y; |
} else if (scaling_selection) {
offset += -scaling_selection_offset.x + (offset - scaling_selection_pivot.x) * (scaling_selection_scale.x - 1);
value += -scaling_selection_offset.y + (value - scaling_selection_pivot.y) * (scaling_selection_scale.y - 1);
}
}
Vector2 pos((offse... | ast_based |
<|fim_prefix|>
// set tensor data
{
llama_file gguf_file(path_lora, "rb");
std::vector<uint8_t> read_buf;
auto set_tensor = [&](ggml_tensor * orig, ggml_tensor * dev) {
size_t offs = gguf_get_data_offset(ctx_gguf.get()) + gguf_get_tensor_offset(ctx_gguf.get(), gguf_find_tenso... |
// set tensor data
{
llama_file gguf_file(path_lora, "rb");
std::vector<uint8_t> read_buf;
auto set_tensor = [&](ggml_tensor * orig, ggml_tensor * dev) {
size_t offs = gguf_get_data_offset(ctx_gguf.get()) + gguf_get_tensor_offset(ctx_gguf.get(), gguf_find_tensor(ctx_gguf.get... | {
if (buf_size > 0) {
buf[0] = '\0';
}
return -1;
} |
auto it = adapter->gguf_kv.begin();
std::advance(it, i);
return snprintf(buf, buf_size, "%s", it->first.c_str());
}
int32_t llama_adapter_meta_val_str_by_index(const llama_adapter_lora * adapter, int32_t i, char * buf, size_t buf_size) {
if (i < 0 || i >= (int)adapter->gguf_kv.size()) {
if (bu... | ast_based |
End of preview. Expand in Data Studio
fim-dataset
A Fill-in-the-Middle (FIM) dataset for code autocompletion.
Dataset Description
This dataset is designed for training code autocompletion models using the Fill-in-the-Middle (FIM) approach. The dataset contains code snippets formatted with FIM special tokens:
<fim_prefix>: Code before the completion point
<fim_suffix>: Code after the completion point
<fim_middle>: The code to be completed
Dataset Structure
Data Fields
text: The formatted FIM instruction containing prefix, suffix, and middle tokens
Data Splits
| Split | Examples |
|---|---|
| train | 42,922 |
| validation | 2,259 |
Usage
from datasets import load_dataset
# Load the dataset
dataset = load_dataset("KrzTyb/fim-dataset")
# Access train and validation splits
train_data = dataset["train"]
val_data = dataset["validation"]
# Example: Print first training example
print(train_data[0]["text"])
Training
This dataset can be used to fine-tune code language models for autocompletion:
from transformers import AutoModelForCausalLM, AutoTokenizer, Trainer
from datasets import load_dataset
# Load model and tokenizer
model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2.5-Coder-0.5B")
tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-Coder-0.5B")
# Add FIM tokens
fim_tokens = ["<fim_prefix>", "<fim_suffix>", "<fim_middle>", "<fim_pad>"]
tokenizer.add_special_tokens({"additional_special_tokens": fim_tokens})
model.resize_token_embeddings(len(tokenizer))
# Load dataset
dataset = load_dataset("KrzTyb/fim-dataset")
# Train (see full training script for details)
trainer = Trainer(
model=model,
train_dataset=dataset["train"],
eval_dataset=dataset["validation"],
# ... other training arguments
)
trainer.train()
Citation
If you use this dataset, please cite:
@dataset{KrzTyb_fim-dataset,
  title={fim-dataset},
  author={Dataset Creator},
  year={2025},
  publisher={Hugging Face},
  howpublished={\url{https://huggingface.co/datasets/KrzTyb/fim-dataset}}
}
License
Please specify the license for your dataset.
- Downloads last month
- 31