20 #include "shared/shared.h"
21 #include "common/bsp.h"
22 #include "common/cmd.h"
23 #include "common/common.h"
24 #include "common/cvar.h"
25 #include "common/files.h"
26 #include "common/math.h"
27 #include "client/video.h"
29 #include "refresh/refresh.h"
30 #include "refresh/images.h"
31 #include "refresh/models.h"
32 #include "system/hunk.h"
36 #include "../../client/client.h"
37 #include "../../client/ui/ui.h"
41 #include <vulkan/vulkan.h>
43 #include <SDL_vulkan.h>
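/* SDL2 provides the window-system glue: SDL_Vulkan_GetInstanceExtensions() reports
   which instance extensions the window needs, and SDL_Vulkan_CreateSurface() creates
   the VkSurfaceKHR that the swapchain further below is built on. */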
75 #ifdef VKPT_DEVICE_GROUPS
76 cvar_t *cvar_sli = NULL;
79 #ifdef VKPT_IMAGE_DUMPS
80 cvar_t *cvar_dump_image = NULL;
86 #define UBO_CVAR_DO(name, default_value) cvar_t *cvar_##name;
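/* X-macro pattern: UBO_CVAR_LIST expands UBO_CVAR_DO once per tunable, so the same
   list declares the cvar_t pointers here, registers them with Cvar_Get() in the init
   code, and copies their values into the uniform buffer every frame. */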
Com_Printf("Resolution scale: %d%%\n", scr_viewsize->integer);
181 return a.width == b.width && a.height == b.height;
192 result.width = (result.width + 7) & ~7;
193 result.height = (result.height + 7) & ~7;
213 result.width = (result.width + 7) & ~7;
214 result.height = (result.height + 7) & ~7;
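/* The render and screen-image extents are rounded up to a multiple of 8 pixels,
   presumably so the checkerboarding and reduced-resolution denoiser passes never
   have to handle odd-sized edges. */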
if((init->flags & init_flags) != init_flags)
return VK_RESULT_MAX_ENUM;
if((init->flags & destroy_flags) != destroy_flags)
? !(init->destroy() == VK_SUCCESS)
FILE *f = _popen("compile_shaders.bat", "r");
FILE *f = popen("make -j compile_shaders", "r");
while(fgets(buf, sizeof buf, f)) {
Com_Printf("%s", buf);
Com_EPrintf("Cannot find material '%s' in table\n");
Com_EPrintf("Cannot find material '%s' in table\n");
383 #define _VK_INST_EXTENSION_DO(a) PFN_##a q##a;
385 #undef _VK_INST_EXTENSION_DO
387 #define _VK_EXTENSION_DO(a) PFN_##a q##a;
389 #undef _VK_EXTENSION_DO
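/* Every listed extension entry point gets a q-prefixed PFN_* pointer declared here;
   the pointers are resolved after instance/device creation via vkGetInstanceProcAddr
   and vkGetDeviceProcAddr (see the matching macro expansions further down). */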
391 #ifdef VKPT_ENABLE_VALIDATION
392 const char *vk_requested_layers[] = {
393 "VK_LAYER_LUNARG_standard_validation"
398 VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
399 VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
400 #ifdef VKPT_DEVICE_GROUPS
401 VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
406 VK_NV_RAY_TRACING_EXTENSION_NAME,
407 VK_KHR_SWAPCHAIN_EXTENSION_NAME,
408 VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME,
409 VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME,
410 #ifdef VKPT_ENABLE_VALIDATION
411 VK_EXT_DEBUG_MARKER_EXTENSION_NAME,
413 #ifdef VKPT_DEVICE_GROUPS
414 VK_KHR_DEVICE_GROUP_EXTENSION_NAME,
415 VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,
420 .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
.pApplicationName = "quake 2 pathtracing",
.applicationVersion = VK_MAKE_VERSION(1, 0, 0),
.pEngineName = "vkpt",
.engineVersion = VK_MAKE_VERSION(1, 0, 0),
.apiVersion = VK_API_VERSION_1_1,
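/* Vulkan 1.1 is requested at the instance level; ray tracing itself comes from the
   VK_NV_ray_tracing device extension listed above, not from a core API version. */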
uint32_t *num_extensions,
VkExtensionProperties **ext)
_VK(vkEnumerateInstanceExtensionProperties(layer, num_extensions, NULL));
*ext = malloc(sizeof(**ext) * *num_extensions);
_VK(vkEnumerateInstanceExtensionProperties(layer, num_extensions, *ext));
uint32_t *num_layers,
VkLayerProperties **ext)
_VK(vkEnumerateInstanceLayerProperties(num_layers, NULL));
*ext = malloc(sizeof(**ext) * *num_layers);
_VK(vkEnumerateInstanceLayerProperties(num_layers, *ext));
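/* Both helpers use the standard Vulkan enumerate-twice idiom: query the count with a
   NULL array, allocate, then query again to fill the array. */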
457 #ifdef VKPT_ENABLE_VALIDATION
for (int i = 0; i < LENGTH(vk_requested_layers); i++)
if(!strcmp(name, vk_requested_layers[i]))
465 static VKAPI_ATTR VkBool32 VKAPI_CALL
467 VkDebugUtilsMessageSeverityFlagBitsEXT severity,
468 VkDebugUtilsMessageTypeFlagsEXT type,
469 const VkDebugUtilsMessengerCallbackDataEXT* callback_data,
Com_EPrintf("validation layer %i %i: %s\n", (int32_t)type, (int32_t)severity, callback_data->pMessage);
debug_output("Vulkan error: %s\n", callback_data->pMessage);
if (callback_data->cmdBufLabelCount)
for (uint32_t i = 0; i < callback_data->cmdBufLabelCount; ++i)
VkDebugUtilsLabelEXT* label = &callback_data->pCmdBufLabels[i];
Com_EPrintf("%s ~ ", label->pLabelName);
if (callback_data->objectCount)
for (uint32_t i = 0; i < callback_data->objectCount; ++i)
VkDebugUtilsObjectNameInfoEXT* obj = &callback_data->pObjects[i];
Com_EPrintf("--- %s %i\n", obj->pObjectName, (int32_t)obj->objectType);
502 const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
503 const VkAllocationCallbacks* pAllocator,
504 VkDebugUtilsMessengerEXT* pCallback)
PFN_vkCreateDebugUtilsMessengerEXT func = (PFN_vkCreateDebugUtilsMessengerEXT)
vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT");
return func(instance, pCreateInfo, pAllocator, pCallback);
return VK_ERROR_EXTENSION_NOT_PRESENT;
VkDebugUtilsMessengerEXT callback,
const VkAllocationCallbacks* pAllocator)
PFN_vkDestroyDebugUtilsMessengerEXT func = (PFN_vkDestroyDebugUtilsMessengerEXT)
vkGetInstanceProcAddr(instance, "vkDestroyDebugUtilsMessengerEXT");
func(instance, callback, pAllocator);
return VK_ERROR_EXTENSION_NOT_PRESENT;
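/* vkCreateDebugUtilsMessengerEXT / vkDestroyDebugUtilsMessengerEXT are extension
   functions, so they must be fetched through vkGetInstanceProcAddr; when the loader
   does not expose them, the wrappers report VK_ERROR_EXTENSION_NOT_PRESENT. */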
532 VkSurfaceCapabilitiesKHR surf_capabilities;
535 uint32_t num_formats = 0;
VkSurfaceFormatKHR *avail_surface_formats = alloca(sizeof(VkSurfaceFormatKHR) * num_formats);
546 VkFormat acceptable_formats[] = {
547 VK_FORMAT_R8G8B8A8_SRGB, VK_FORMAT_B8G8R8A8_SRGB,
for(int i = 0; i < LENGTH(acceptable_formats); i++) {
for(int j = 0; j < num_formats; j++)
if(acceptable_formats[i] == avail_surface_formats[j].format) {
uint32_t num_present_modes = 0;
VkPresentModeKHR *avail_present_modes = alloca(sizeof(VkPresentModeKHR) * num_present_modes);
qboolean immediate_mode_available = qfalse;
for (int i = 0; i < num_present_modes; i++) {
if (avail_present_modes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR) {
immediate_mode_available = qtrue;
} else if (immediate_mode_available) {
if(surf_capabilities.currentExtent.width != ~0u) {
uint32_t num_images = 2;
if(surf_capabilities.maxImageCount > 0)
num_images = MIN(num_images, surf_capabilities.maxImageCount);
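/* Two swapchain images are requested (double buffering); a maxImageCount of 0 means
   the surface imposes no upper limit, otherwise the request is clamped to it. */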
598 VkSwapchainCreateInfoKHR swpch_create_info = {
599 .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
601 .minImageCount = num_images,
605 .imageArrayLayers = 1,
606 .imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
607 | VK_IMAGE_USAGE_TRANSFER_SRC_BIT
608 | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
609 .imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
610 .queueFamilyIndexCount = 0,
611 .pQueueFamilyIndices = NULL,
612 .preTransform = surf_capabilities.currentTransform,
613 .compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
617 .oldSwapchain = VK_NULL_HANDLE,
Com_EPrintf("error creating swapchain\n");
632 VkImageViewCreateInfo img_create_info = {
633 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
635 .viewType = VK_IMAGE_VIEW_TYPE_2D,
639 VK_COMPONENT_SWIZZLE_R,
640 VK_COMPONENT_SWIZZLE_G,
641 VK_COMPONENT_SWIZZLE_B,
642 VK_COMPONENT_SWIZZLE_A
645 .subresourceRange = {
646 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
Com_EPrintf("error creating image view!");
666 .subresourceRange = {
667 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
675 .oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
676 .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
691 VkCommandPoolCreateInfo cmd_pool_create_info = {
692 .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
694 .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
713 VkSemaphoreCreateInfo semaphore_info = { .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO };
729 VkFenceCreateInfo fence_info = {
730 .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
731 .flags = VK_FENCE_CREATE_SIGNALED_BIT,
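/* The per-frame fences are created already signaled, presumably so the first
   wait on them in the render loop returns immediately instead of deadlocking. */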
Com_Printf("----- init_vulkan -----\n");
Com_Printf("Available Vulkan layers: \n");
Com_Printf(" %s%s\n", qvk.layers[i].layerName, requested ? " (requested)" : "");
Com_EPrintf("Couldn't get SDL2 Vulkan extension count\n");
Com_EPrintf("Couldn't get SDL2 Vulkan extensions\n");
Com_Printf("Vulkan instance extensions required by SDL2: \n");
char **ext = alloca(sizeof(char *) * num_inst_ext_combined);
Com_Printf("Supported Vulkan instance extensions: \n");
for(int j = 0; j < num_inst_ext_combined; j++) {
Com_Printf(" %s%s\n", qvk.extensions[i].extensionName, requested ? " (requested)" : "");
794 VkInstanceCreateInfo inst_create_info = {
795 .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
797 #ifdef VKPT_ENABLE_VALIDATION
.enabledLayerCount = LENGTH(vk_requested_layers),
.ppEnabledLayerNames = vk_requested_layers,
.enabledExtensionCount = num_inst_ext_combined,
.ppEnabledExtensionNames = (const char * const*)ext,
VkResult result = vkCreateInstance(&inst_create_info, NULL, &qvk.instance);
806 if (result != VK_SUCCESS)
812 #define _VK_INST_EXTENSION_DO(a) \
813 q##a = (PFN_##a) vkGetInstanceProcAddr(qvk.instance, #a); \
814 if (!q##a) { Com_EPrintf("warning: could not load instance function %s\n", #a); }
816 #undef _VK_INST_EXTENSION_DO
819 VkDebugUtilsMessengerCreateInfoEXT dbg_create_info = {
820 .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
822 VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT
823 | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
825 VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT
826 | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
Com_EPrintf("SDL2 could not create a surface!\n");
840 uint32_t num_devices = 0;
_VK(vkEnumeratePhysicalDevices(qvk.instance, &num_devices, NULL));
VkPhysicalDevice *devices = alloca(sizeof(VkPhysicalDevice) * num_devices);
_VK(vkEnumeratePhysicalDevices(qvk.instance, &num_devices, devices));
#ifdef VKPT_DEVICE_GROUPS
uint32_t num_device_groups = 0;
if (cvar_sli->integer)
_VK(qvkEnumeratePhysicalDeviceGroupsKHR(qvk.instance, &num_device_groups, NULL));
VkDeviceGroupDeviceCreateInfoKHR device_group_create_info;
VkPhysicalDeviceGroupPropertiesKHR device_group_info;
if(num_device_groups > 0) {
num_device_groups = 1;
_VK(qvkEnumeratePhysicalDeviceGroupsKHR(qvk.instance, &num_device_groups, &device_group_info));
865 device_group_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR;
866 device_group_create_info.pNext = NULL;
867 device_group_create_info.physicalDeviceCount = device_group_info.physicalDeviceCount;
868 device_group_create_info.pPhysicalDevices = device_group_info.physicalDevices;
872 qvk.device_group_physical_devices[i] = device_group_create_info.pPhysicalDevices[i];
878 int picked_device = -1;
for(int i = 0; i < num_devices; i++) {
VkPhysicalDeviceProperties dev_properties;
VkPhysicalDeviceFeatures dev_features;
vkGetPhysicalDeviceProperties(devices[i], &dev_properties);
vkGetPhysicalDeviceFeatures (devices[i], &dev_features);
Com_Printf("Physical device %d: %s\n", i, dev_properties.deviceName);
Com_Printf("Max number of allocations: %d\n", dev_properties.limits.maxMemoryAllocationCount);
vkEnumerateDeviceExtensionProperties(devices[i], NULL, &num_ext, NULL);
VkExtensionProperties *ext_properties = alloca(sizeof(VkExtensionProperties) * num_ext);
vkEnumerateDeviceExtensionProperties(devices[i], NULL, &num_ext, ext_properties);
Com_Printf("Supported Vulkan device extensions:\n");
for(int j = 0; j < num_ext; j++) {
Com_Printf(" %s\n", ext_properties[j].extensionName);
if(!strcmp(ext_properties[j].extensionName, VK_NV_RAY_TRACING_EXTENSION_NAME)) {
if(picked_device < 0)
if(picked_device < 0) {
Com_Error(ERR_FATAL, "No ray tracing capable GPU found.");
910 VkPhysicalDeviceProperties dev_properties;
911 vkGetPhysicalDeviceProperties(devices[picked_device], &dev_properties);
Com_Printf("Picked physical device %d: %s\n", picked_device, dev_properties.deviceName);
if (dev_properties.vendorID == 0x10de)
uint32_t driver_major = (dev_properties.driverVersion >> 22) & 0x3ff;
uint32_t driver_minor = (dev_properties.driverVersion >> 14) & 0xff;
Com_Printf("NVIDIA GPU detected. Driver version: %u.%02u\n", driver_major, driver_minor);
uint32_t required_major = 0;
uint32_t required_minor = 0;
if (driver_major < required_major || (driver_major == required_major && driver_minor < required_minor))
Com_Error(ERR_FATAL, "This game requires NVIDIA Graphics Driver version to be at least %u.%02u, while the installed version is %u.%02u.\nPlease update the NVIDIA Graphics Driver.",
required_major, required_minor, driver_major, driver_minor);
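/* NVIDIA packs the driver version into VkPhysicalDeviceProperties::driverVersion with
   the major number in bits 22..31 and the minor number in bits 14..21, which is what
   the shifts above decode before checking the minimum supported driver. */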
941 uint32_t num_queue_families = 0;
vkGetPhysicalDeviceQueueFamilyProperties(qvk.physical_device, &num_queue_families, NULL);
VkQueueFamilyProperties *queue_families = alloca(sizeof(VkQueueFamilyProperties) * num_queue_families);
vkGetPhysicalDeviceQueueFamilyProperties(qvk.physical_device, &num_queue_families, queue_families);
for(int i = 0; i < num_queue_families; i++) {
953 if(!queue_families[i].queueCount)
955 VkBool32 present_support = 0;
958 const int supports_graphics = queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT;
959 const int supports_compute = queue_families[i].queueFlags & VK_QUEUE_COMPUTE_BIT;
960 const int supports_transfer = queue_families[i].queueFlags & VK_QUEUE_TRANSFER_BIT;
Com_Error(ERR_FATAL, "Could not find a suitable Vulkan queue family!\n");
980 float queue_priorities = 1.0f;
981 int num_create_queues = 0;
982 VkDeviceQueueCreateInfo queue_create_info[3];
985 VkDeviceQueueCreateInfo q = {
986 .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
988 .pQueuePriorities = &queue_priorities,
992 queue_create_info[num_create_queues++] = q;
995 VkDeviceQueueCreateInfo q = {
996 .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
998 .pQueuePriorities = &queue_priorities,
1001 queue_create_info[num_create_queues++] = q;
1004 VkDeviceQueueCreateInfo q = {
1005 .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
1007 .pQueuePriorities = &queue_priorities,
1010 queue_create_info[num_create_queues++] = q;
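/* Up to three VkDeviceQueueCreateInfo entries are filled in, likely one per distinct
   queue family (graphics, compute, transfer), so num_create_queues ends up between
   1 and 3 depending on how the families overlap on this device. */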
1013 VkPhysicalDeviceDescriptorIndexingFeaturesEXT idx_features = {
1014 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
1015 .runtimeDescriptorArray = 1,
1016 .shaderSampledImageArrayNonUniformIndexing = 1,
1019 #ifdef VKPT_DEVICE_GROUPS
Com_Printf("Enabling multi-GPU support\n");
1022 idx_features.pNext = &device_group_create_info;
1026 VkPhysicalDeviceFeatures2 device_features = {
1027 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
1028 .pNext = &idx_features,
1031 .robustBufferAccess = 1,
1032 .fullDrawIndexUint32 = 1,
1033 .imageCubeArray = 1,
1034 .independentBlend = 1,
1035 .geometryShader = 1,
1036 .tessellationShader = 1,
1037 .sampleRateShading = 0,
1040 .multiDrawIndirect = 1,
1041 .drawIndirectFirstInstance = 1,
1043 .depthBiasClamp = 1,
1044 .fillModeNonSolid = 0,
1050 .samplerAnisotropy = 1,
1051 .textureCompressionETC2 = 0,
1052 .textureCompressionASTC_LDR = 0,
1053 .textureCompressionBC = 0,
1054 .occlusionQueryPrecise = 0,
1055 .pipelineStatisticsQuery = 1,
1056 .vertexPipelineStoresAndAtomics = 1,
1057 .fragmentStoresAndAtomics = 1,
1058 .shaderTessellationAndGeometryPointSize = 1,
1059 .shaderImageGatherExtended = 1,
1060 .shaderStorageImageExtendedFormats = 1,
1061 .shaderStorageImageMultisample = 1,
1062 .shaderStorageImageReadWithoutFormat = 1,
1063 .shaderStorageImageWriteWithoutFormat = 1,
1064 .shaderUniformBufferArrayDynamicIndexing = 1,
1065 .shaderSampledImageArrayDynamicIndexing = 1,
1066 .shaderStorageBufferArrayDynamicIndexing = 1,
1067 .shaderStorageImageArrayDynamicIndexing = 1,
1068 .shaderClipDistance = 1,
1069 .shaderCullDistance = 1,
1073 .shaderResourceResidency = 1,
1074 .shaderResourceMinLod = 1,
1076 .sparseResidencyBuffer = 1,
1077 .sparseResidencyImage2D = 1,
1078 .sparseResidencyImage3D = 1,
1079 .sparseResidency2Samples = 1,
1080 .sparseResidency4Samples = 1,
1081 .sparseResidency8Samples = 1,
1082 .sparseResidency16Samples = 1,
1083 .sparseResidencyAliased = 1,
1084 .variableMultisampleRate = 0,
1085 .inheritedQueries = 1,
1088 VkDeviceCreateInfo dev_create_info = {
1089 .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
1090 .pNext = &device_features,
1091 .pQueueCreateInfos = queue_create_info,
1092 .queueCreateInfoCount = num_create_queues,
1099 if (result != VK_SUCCESS)
1109 #define _VK_EXTENSION_DO(a) \
1110 q##a = (PFN_##a) vkGetDeviceProcAddr(qvk.device, #a); \
1111 if(!q##a) { Com_EPrintf("warning: could not load function %s\n", #a); }
1113 #undef _VK_EXTENSION_DO
Com_Printf("-----------------------\n");
1120 static VkShaderModule
1127 for(len = 0; path[len]; len++)
1128 path[len] = tolower(path[len]);
if(path[len] == '_') {
1140 size = FS_LoadFile(path, &data);
Com_EPrintf("Couldn't find shader module %s!\n", path);
return VK_NULL_HANDLE;
1146 VkShaderModule module;
1148 VkShaderModuleCreateInfo create_info = {
1149 .sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
1151 .pCode = (uint32_t *) data,
_VK(vkCreateShaderModule(qvk.device, &create_info, NULL, &module));
1164 VkResult ret = VK_SUCCESS;
1165 #define SHADER_MODULE_DO(a) do { \
1166 qvk.shader_modules[a] = create_shader_module_from_file(shader_module_file_names[a], #a); \
1167 ret = (ret == VK_SUCCESS && qvk.shader_modules[a]) ? VK_SUCCESS : VK_ERROR_INITIALIZATION_FAILED; \
1168 if(qvk.shader_modules[a]) { \
1169 ATTACH_LABEL_VARIABLE_NAME((uint64_t)qvk.shader_modules[a], SHADER_MODULE, #a); \
1175 #undef SHADER_MODULE_DO
1182 #define SHADER_MODULE_DO(a) \
1183 vkDestroyShaderModule(qvk.device, qvk.shader_modules[a], NULL); \
1184 qvk.shader_modules[a] = VK_NULL_HANDLE;
1188 #undef SHADER_MODULE_DO
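/* On shutdown/reload, SHADER_MODULE_DO destroys each shader module and resets its
   handle to VK_NULL_HANDLE so a later reload starts from a clean slate. */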
1265 #define MAX_MODEL_LIGHTS 1024
1278 skinnum = entity->skinnum;
const float* transform, int model_instance_index, qboolean is_viewer_weapon, qboolean is_double_sided)
Com_EPrintf("Cannot find material for model '%s'\n", model->name);
int material_id = material->flags;
1302 if (model->model_class == MCLASS_EXPLOSION)
1308 if (is_viewer_weapon)
1311 if (is_double_sided)
1316 if (entity->flags & RF_SHELL_RED)
1318 if (entity->flags & RF_SHELL_GREEN)
1320 if (entity->flags & RF_SHELL_BLUE)
1326 int frame = entity->frame;
1327 int oldframe = entity->oldframe;
1328 if (frame >= model->numframes) frame = 0;
1329 if (oldframe >= model->numframes) oldframe = 0;
memcpy(instance->M, transform, sizeof(float) * 16);
instance->backlerp = entity->backlerp;
instance->alpha = (entity->flags & RF_TRANSLUCENT) ? entity->alpha : 1.0f;
1344 ubo->num_sphere_lights = 0;
for (int i = 0; i < num_lights; i++)
const dlight_t* light = lights + i;
float* dynlight_data = (float*)(ubo->sphere_light_data + ubo->num_sphere_lights * 2);
float* center = dynlight_data;
float* radius = dynlight_data + 3;
float* color = dynlight_data + 4;
dynlight_data[7] = 0.f;
VectorCopy(light->origin, center);
VectorScale(light->color, light->intensity / 25.f, color);
1358 *radius = light->radius;
1360 ubo->num_sphere_lights++;
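/* Each dlight is packed into two vec4s of sphere_light_data: xyz center plus radius
   in the first, rgb color (scaled by intensity / 25) plus a zeroed pad in the second,
   presumably matching the layout the GPU light-sampling code expects. */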
1366 vec4_t point = { p[0], p[1], p[2], 1.f };
1369 VectorCopy(transformed, result);
static void process_bsp_entity(const entity_t* entity, int* bsp_mesh_idx, int* instance_idx, int* num_instanced_vert)
1375 uint32_t* ubo_bsp_cluster_id = (uint32_t*)uniform_instance_buffer->bsp_cluster_id;
1376 uint32_t* ubo_bsp_prim_offset = (uint32_t*)uniform_instance_buffer->bsp_prim_offset;
1377 uint32_t* ubo_instance_buf_offset = (uint32_t*)uniform_instance_buffer->bsp_instance_buf_offset;
1378 uint32_t* ubo_instance_buf_size = (uint32_t*)uniform_instance_buffer->bsp_instance_buf_size;
1380 const int current_bsp_mesh_index = *bsp_mesh_idx;
assert(!"BSP entity count overflow");
assert(!"Total entity count overflow");
float transform[16];
BspMeshInstance* ubo_instance_info = uniform_instance_buffer->bsp_mesh_instances + current_bsp_mesh_index;
memcpy(&ubo_instance_info->M, transform, sizeof(transform));
ubo_instance_info->frame = entity->frame;
memset(ubo_instance_info->padding, 0, sizeof(ubo_instance_info->padding));
for (int corner = 0; corner < 8; corner++)
1416 vec3_t corner_pt = {
1422 vec3_t corner_pt_world;
1431 ubo_bsp_cluster_id[current_bsp_mesh_index] = cluster;
ubo_bsp_prim_offset[current_bsp_mesh_index] = model->idx_offset / 3;
const int mesh_vertex_num = model->idx_count;
ubo_instance_buf_offset[current_bsp_mesh_index] = *num_instanced_vert / 3;
ubo_instance_buf_size[current_bsp_mesh_index] = mesh_vertex_num / 3;
((int*)uniform_instance_buffer->model_indices)[*instance_idx] = ~current_bsp_mesh_index;
*num_instanced_vert += mesh_vertex_num;
assert(!"Model light count overflow");
VectorCopy(src_light->color, dst_light->color);
1487 #define MESH_FILTER_TRANSPARENT 1
1488 #define MESH_FILTER_OPAQUE 2
1489 #define MESH_FILTER_ALL 3
1492 const entity_t* entity,
1493 const model_t* model,
1494 qboolean is_viewer_weapon,
1495 qboolean is_double_sided,
1496 int* model_instance_idx,
1498 int* num_instanced_vert,
1500 qboolean* contains_transparent)
1503 uint32_t* ubo_instance_buf_offset = (uint32_t*)uniform_instance_buffer->model_instance_buf_offset;
1504 uint32_t* ubo_instance_buf_size = (uint32_t*)uniform_instance_buffer->model_instance_buf_size;
1505 uint32_t* ubo_model_idx_offset = (uint32_t*)uniform_instance_buffer->model_idx_offset;
1506 uint32_t* ubo_model_cluster_id = (uint32_t*)uniform_instance_buffer->model_cluster_id;
1508 float transform[16];
1511 int current_model_instance_index = *model_instance_idx;
1512 int current_instance_index = *instance_idx;
1513 int current_num_instanced_vert = *num_instanced_vert;
1515 if (contains_transparent)
1516 *contains_transparent = qfalse;
for (int i = 0; i < model->nummeshes; i++)
assert(!"Model entity count overflow");
assert(!"Total entity count overflow");
uint32_t material_id = fill_model_instance(entity, model, mesh, transform, current_model_instance_index, is_viewer_weapon, is_double_sided);
if(contains_transparent)
*contains_transparent = qtrue;
hash.entity = entity->id;
hash.model = entity->model;
uint32_t cluster_id = ~0u;
ubo_model_cluster_id[current_model_instance_index] = cluster_id;
ubo_model_idx_offset[current_model_instance_index] = mesh->idx_offset;
ubo_instance_buf_offset[current_model_instance_index] = current_num_instanced_vert / 3;
ubo_instance_buf_size[current_model_instance_index] = mesh->numtris;
((int*)uniform_instance_buffer->model_indices)[current_instance_index] = current_model_instance_index;
current_model_instance_index++;
current_instance_index++;
current_num_instanced_vert += mesh->numtris * 3;
1577 if (model->model_class == MCLASS_STATIC_LIGHT)
vec4_t begin, end, color;
vec4_t offset1 = { 0.f, 0.5f, -10.f, 1.f };
vec4_t offset2 = { 0.f, 0.5f, 10.f, 1.f };
VectorSet(color, 0.25f, 0.5f, 0.07f);
1590 *model_instance_idx = current_model_instance_index;
1591 *instance_idx = current_instance_index;
1592 *num_instanced_vert = current_num_instanced_vert;
1595 #if CL_RTX_SHADERBALLS
1596 extern vec3_t cl_dev_shaderballs_pos;
1599 vkpt_drop_shaderballs()
VectorCopy(vkpt_refdef.fd->vieworg, cl_dev_shaderballs_pos);
1602 cl_dev_shaderballs_pos[2] -= 46.12f;
1613 memcpy(instance_buffer->bsp_mesh_instances_prev, instance_buffer->bsp_mesh_instances,
1614 sizeof(instance_buffer->bsp_mesh_instances_prev));
1615 memcpy(instance_buffer->model_instances_prev, instance_buffer->model_instances,
1616 sizeof(instance_buffer->model_instances_prev));
memcpy(instance_buffer->bsp_cluster_id_prev, instance_buffer->bsp_cluster_id, sizeof(instance_buffer->bsp_cluster_id));
memcpy(instance_buffer->model_cluster_id_prev, instance_buffer->model_cluster_id, sizeof(instance_buffer->model_cluster_id));
1621 static int transparent_model_indices[MAX_ENTITIES];
1622 static int viewer_model_indices[MAX_ENTITIES];
1623 static int viewer_weapon_indices[MAX_ENTITIES];
1624 static int explosion_indices[MAX_ENTITIES];
1625 int transparent_model_num = 0;
1626 int viewer_model_num = 0;
1627 int viewer_weapon_num = 0;
1628 int explosion_num = 0;
1630 int model_instance_idx = 0;
1631 int bsp_mesh_idx = 0;
1632 int num_instanced_vert = 0;
1633 int instance_idx = 0;
1641 if (entity->model & 0x80000000)
1645 transparent_model_indices[transparent_model_num++] = i;
1652 if (model == NULL || model->meshes == NULL)
1655 if (entity->flags & RF_VIEWERMODEL)
1656 viewer_model_indices[viewer_model_num++] = i;
1657 else if (first_person_model && entity->flags & RF_WEAPONMODEL)
1658 viewer_weapon_indices[viewer_weapon_num++] = i;
1659 else if (model->model_class == MCLASS_EXPLOSION || model->model_class == MCLASS_SMOKE)
1660 explosion_indices[explosion_num++] = i;
1663 qboolean contains_transparent = qfalse;
1666 if(contains_transparent)
1667 transparent_model_indices[transparent_model_num++] = i;
1674 const uint32_t transparent_model_base_vertex_num = num_instanced_vert;
for (int i = 0; i < transparent_model_num; i++)
const entity_t* entity = vkpt_refdef.fd->entities + transparent_model_indices[i];
if (entity->model & 0x80000000)
const uint32_t viewer_model_base_vertex_num = num_instanced_vert;
if (first_person_model)
for (int i = 0; i < viewer_model_num; i++)
const entity_t* entity = vkpt_refdef.fd->entities + viewer_model_indices[i];
const uint32_t viewer_weapon_base_vertex_num = num_instanced_vert;
for (int i = 0; i < viewer_weapon_num; i++)
const entity_t* entity = vkpt_refdef.fd->entities + viewer_weapon_indices[i];
if (entity->flags & RF_LEFTHAND)
const uint32_t explosion_base_vertex_num = num_instanced_vert;
for (int i = 0; i < explosion_num; i++)
const entity_t* entity = vkpt_refdef.fd->entities + explosion_indices[i];
memset(instance_buffer->world_current_to_prev, ~0u, sizeof(instance_buffer->world_current_to_prev));
memset(instance_buffer->world_prev_to_current, ~0u, sizeof(instance_buffer->world_prev_to_current));
memset(instance_buffer->model_current_to_prev, ~0u, sizeof(instance_buffer->model_current_to_prev));
memset(instance_buffer->model_prev_to_current, ~0u, sizeof(instance_buffer->model_prev_to_current));
1746 instance_buffer->world_current_to_prev[i] = j;
1747 instance_buffer->world_prev_to_current[j] = i;
1758 instance_buffer->model_current_to_prev[i] = j;
1759 instance_buffer->model_prev_to_current[j] = i;
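/* The *_current_to_prev / *_prev_to_current tables map instance slots between this
   frame and the previous one, presumably for temporal reprojection in the denoiser;
   entries left at ~0u mean the instance has no counterpart in the other frame. */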
1765 #ifdef VKPT_IMAGE_DUMPS
copy_to_dump_texture(VkCommandBuffer cmd_buf, int src_image_index)
VkImage src_image = qvk.images[src_image_index];
VkImage dst_image = qvk.dump_image;
1772 VkImageCopy image_copy_region = {
1773 .srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
1774 .srcSubresource.mipLevel = 0,
1775 .srcSubresource.baseArrayLayer = 0,
1776 .srcSubresource.layerCount = 1,
1782 .dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
1783 .dstSubresource.mipLevel = 0,
1784 .dstSubresource.baseArrayLayer = 0,
1785 .dstSubresource.layerCount = 1,
1796 VkImageSubresourceRange subresource_range = {
1797 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
1800 .baseArrayLayer = 0,
1806 .subresourceRange = subresource_range,
1807 .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT,
1808 .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
1809 .oldLayout = VK_IMAGE_LAYOUT_GENERAL,
1810 .newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
1815 .subresourceRange = subresource_range,
1816 .srcAccessMask = VK_ACCESS_HOST_READ_BIT,
1817 .dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
1818 .oldLayout = VK_IMAGE_LAYOUT_GENERAL,
1819 .newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
1822 vkCmdCopyImage(cmd_buf,
1823 src_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1824 dst_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1825 1, &image_copy_region);
1829 .subresourceRange = subresource_range,
1830 .srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
1831 .dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT,
1832 .oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1833 .newLayout = VK_IMAGE_LAYOUT_GENERAL
1838 .subresourceRange = subresource_range,
1839 .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
1840 .dstAccessMask = VK_ACCESS_HOST_READ_BIT,
1841 .oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1842 .newLayout = VK_IMAGE_LAYOUT_GENERAL
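/* The barriers above move the source and dump images into TRANSFER_SRC/DST layouts
   around vkCmdCopyImage and back to GENERAL afterwards, so the host can map and read
   the dump image once the command buffer has finished executing. */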
1856 feedback->viewcluster = viewleaf->cluster;
1858 feedback->viewcluster = -1;
static char const * unknown = "<unknown>";
char const * view_material = unknown;
char const * view_material_override = unknown;
view_material = image->name;
view_material_override = image->filepath;
strcpy(feedback->view_material, view_material);
strcpy(feedback->view_material_override, view_material_override);
feedback->lookatcluster = readback.cluster;
feedback->num_light_polys = 0;
feedback->num_light_polys = light_offsets[1] - light_offsets[0];
VectorCopy(readback.hdr_color, feedback->hdr_color);
1934 const int num_warmup_frames = 5;
1946 char text[MAX_QPATH];
float percentage = powf(max(0.f, (num_accumulated_frames - num_warmup_frames) / (float)num_frames_to_accumulate), 0.5f);
Q_snprintf(text, sizeof(text), "Photo mode: accumulating samples... %d%%", (int)(min(1.f, percentage) * 100.f));
int frames_after_accumulation_finished = num_accumulated_frames - num_warmup_frames - num_frames_to_accumulate;
1951 float hud_alpha = max(0.f, min(1.f, (50 - frames_after_accumulation_finished) * 0.02f));
1990 if (cvar_pt_num_bounce_rays->value == 0.5f)
ref_mode->num_bounce_rays = max(0, min(2, round(cvar_pt_num_bounce_rays->value)));
2010 VectorSet(sky_matrix[0], 1.f, 0.f, 0.f);
2011 VectorSet(sky_matrix[1], 0.f, 1.f, 0.f);
2012 VectorSet(sky_matrix[2], 0.f, 0.f, 1.f);
VectorCopy(direction, forward);
VectorSet(up, 0.f, 0.f, 1.f);
VectorSet(up, 0.f, 1.f, 0.f);
2033 float aspect = 1.75f;
2034 float tan_half_fov_x = 1.f;
2035 float tan_half_fov_y = tan_half_fov_x / aspect;
2037 VectorCopy(position, data + 0);
VectorCopy(forward, data + 4);
VectorMA(data + 4, -tan_half_fov_x, right, data + 4);
VectorMA(data + 4, tan_half_fov_y, up, data + 4);
VectorScale(right, 2.f * tan_half_fov_x, data + 8);
VectorScale(up, -2.f * tan_half_fov_y, data + 12);
memcpy(ubo->V_prev, ubo->V, sizeof(float) * 16);
memcpy(ubo->P_prev, ubo->P, sizeof(float) * 16);
memcpy(ubo->invP_prev, ubo->invP, sizeof(float) * 16);
2055 ubo->cylindrical_hfov_prev = ubo->cylindrical_hfov;
2064 float viewport_proj[16] = {
memcpy(ubo->V, V, sizeof(float) * 16);
memcpy(ubo->P, P, sizeof(float) * 16);
float rad_per_pixel = atanf(tanf(fd->fov_y * M_PI / 360.0f) / ((float)qvk.extent_unscaled.height * 0.5f));
2088 ubo->cylindrical_hfov = 0.f;
2103 ubo->pt_swap_checkerboard = 0;
2107 int camera_cluster_contents = viewleaf ? viewleaf->contents : 0;
2109 if (camera_cluster_contents & CONTENTS_WATER)
2111 else if (camera_cluster_contents & CONTENTS_SLIME)
2113 else if (camera_cluster_contents & CONTENTS_LAVA)
2118 ubo->time = fd->time;
2122 #define UBO_CVAR_DO(name, default_value) ubo->name = cvar_##name->value;
2130 ubo->pt_fake_roughness_threshold = 1.f;
2141 ubo->pt_specular_anti_flicker = 0.f;
2142 ubo->pt_sun_bounce_range = 10000.f;
2143 ubo->pt_ndf_trim = 1.f;
2150 qboolean enable_dof = qtrue;
case 0: enable_dof = qfalse; break;
default: enable_dof = qtrue; break;
2163 enable_dof = qfalse;
2169 ubo->pt_aperture = 0.f;
2174 ubo->pt_aperture_type = roundf(ubo->pt_aperture_type);
2183 ubo->pt_direct_area_threshold = 10.f;
2185 ubo->pt_min_log_sky_luminance = exp2f(ubo->pt_min_log_sky_luminance);
2186 ubo->pt_max_log_sky_luminance = exp2f(ubo->pt_max_log_sky_luminance);
memcpy(ubo->cam_pos, fd->vieworg, sizeof(float) * 3);
2193 ubo->flt_temporal_lf = 0;
2194 ubo->flt_temporal_hf = 0;
2195 ubo->flt_temporal_spec = 0;
memset(ubo->environment_rotation_matrix, 0, sizeof(ubo->environment_rotation_matrix));
2202 VectorCopy(sky_matrix[0], ubo->environment_rotation_matrix + 0);
2203 VectorCopy(sky_matrix[1], ubo->environment_rotation_matrix + 4);
2204 VectorCopy(sky_matrix[2], ubo->environment_rotation_matrix + 8);
2218 ubo->pt_cameras = 0;
2230 qboolean render_world = (fd->rdflags & RDF_NOWORLDMODEL) == 0;
2232 static float previous_time = -1.f;
2233 float frame_time = min(1.f, max(0.f, fd->time - previous_time));
2234 previous_time = fd->time;
2238 static unsigned previous_wallclock_time = 0;
float frame_wallclock_time = (previous_wallclock_time != 0) ? (float)(current_wallclock_time - previous_wallclock_time) * 1e-3f : 0.f;
2241 previous_wallclock_time = current_wallclock_time;
2253 qboolean sun_visible_prev = qfalse;
2254 static float prev_adapted_luminance = 0.f;
2255 float adapted_luminance = 0.f;
2260 if (adapted_luminance != 1.0f)
2261 prev_adapted_luminance = adapted_luminance;
2263 if (prev_adapted_luminance <= 0.f)
2264 prev_adapted_luminance = 0.005f;
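/* prev_adapted_luminance feeds the tone mapper's eye adaptation: readbacks of exactly
   1.0 are apparently treated as spurious and ignored, and the small positive floor
   keeps the very first frames from starting at zero exposure. */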
2270 vec3_t sky_matrix[3];
qboolean menu_mode = cl_paused->integer == 1 && uis.menuDepth > 0 && render_world;
2296 prepare_ubo(fd, viewleaf, &ref_mode, sky_matrix, render_world);
2297 ubo->prev_adapted_luminance = prev_adapted_luminance;
2302 vec3_t sky_radiance;
2306 float shadowmap_view_proj[16];
2307 float shadowmap_depth_scale;
2312 shadowmap_view_proj,
2313 &shadowmap_depth_scale,
2321 shadowmap_view_proj,
2322 shadowmap_depth_scale);
2345 device_indices[gpu] = gpu;
2349 wait_stages[gpu] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
(*prev_trace_signaled) ? qvk.device_count : 0, prev_trace_semaphores, wait_stages, device_indices,
2360 *prev_trace_signaled = qfalse;
2366 update_transparency(trace_cmd_buf, ubo->V, fd->particles, fd->num_particles, fd->entities, fd->num_entities);
int new_world_anim_frame = (int)(fd->time * 2);
qboolean update_world_animations = (new_world_anim_frame != world_anim_frame);
2402 if (god_rays_enabled)
2426 if (god_rays_enabled)
2440 if (god_rays_enabled)
2474 *curr_trace_signaled = qtrue;
2502 #ifdef VKPT_IMAGE_DUMPS
2503 if (cvar_dump_image->integer)
2505 copy_to_dump_texture(post_cmd_buf, VKPT_IMG_TAA_OUTPUT);
2510 if (cvar_tm_enable->integer != 0)
double a = *(double*)pa;
double b = *(double*)pb;
if (a < b) return -1;
if (a > b) return 1;
2585 #define SCALING_FRAMES 5
2586 static int num_valid_frames = 0;
2591 num_valid_frames = 0;
2598 num_valid_frames = 0;
2607 if (ms < 0 || ms > 1000)
2610 valid_frame_times[num_valid_frames] = ms;
2616 num_valid_frames = 0;
2620 double representative_time = 0;
2622 representative_time += valid_frame_times[i];
double f = cvar_drs_gain->value * (1.0 - representative_time / target_time) - 1.0;
2629 if (representative_time < target_time * cvar_drs_adjust_up->value)
2655 if (res_fence == VK_ERROR_DEVICE_LOST)
Com_EPrintf("Device lost!\n");
2680 #ifdef VKPT_DEVICE_GROUPS
2681 VkAcquireNextImageInfoKHR acquire_info = {
2682 .sType = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
2684 .timeout = (~((uint64_t) 0)),
2686 .fence = VK_NULL_HANDLE,
2695 if(res_swapchain == VK_ERROR_OUT_OF_DATE_KHR || res_swapchain == VK_SUBOPTIMAL_KHR) {
2699 else if(res_swapchain != VK_SUCCESS) {
2750 VkExtent2D extent_render_double;
2766 VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
2767 uint32_t wait_device_indices[] = { 0 };
2774 signal_device_indices[gpu] = gpu;
2781 LENGTH(wait_semaphores), wait_semaphores, wait_stages, wait_device_indices,
2786 #ifdef VKPT_IMAGE_DUMPS
2787 if (cvar_dump_image->integer) {
2790 VkImageSubresource subresource = {
2791 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
2796 VkSubresourceLayout subresource_layout;
vkGetImageSubresourceLayout(qvk.device, qvk.dump_image, &subresource, &subresource_layout);
_VK(vkMapMemory(qvk.device, qvk.dump_image_memory, 0, qvk.dump_image_memory_size, 0, &data));
2808 VkPresentInfoKHR present_info = {
2809 .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
2811 .pWaitSemaphores = signal_semaphores,
2812 .swapchainCount = 1,
2818 #ifdef VKPT_DEVICE_GROUPS
2819 uint32_t present_device_mask = 1;
2820 VkDeviceGroupPresentInfoKHR group_present_info = {
2821 .sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
2822 .swapchainCount = 1,
2823 .pDeviceMasks = &present_device_mask,
2824 .mode = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
2828 present_info.pNext = &group_present_info;
2833 if(res_present == VK_ERROR_OUT_OF_DATE_KHR || res_present == VK_SUBOPTIMAL_KHR) {
Com_Printf("mode changed %d %d\n", width, height);
Com_Error(ERR_FATAL, "VID_Init failed\n");
2879 extern SDL_Window *sdl_window;
2907 #ifdef VKPT_DEVICE_GROUPS
cvar_sli = Cvar_Get("sli", "1", CVAR_REFRESH | CVAR_ARCHIVE);
#ifdef VKPT_IMAGE_DUMPS
cvar_dump_image = Cvar_Get("dump_image", "0", 0);
Com_Error(ERR_FATAL, "Couldn't initialize the material system.\n");
2931 #define UBO_CVAR_DO(name, default_value) cvar_##name = Cvar_Get(#name, #default_value, 0);
#undef UBO_CVAR_DO
2949 cvar_pt_num_bounce_rays->flags |= CVAR_ARCHIVE;
Com_Error(ERR_FATAL, "Couldn't initialize Vulkan.\n");
2980 #if CL_RTX_SHADERBALLS
Cmd_AddCommand("drop_balls", (xcommand_t)&vkpt_drop_shaderballs);
for (int i = 0; i < 256; i++) {
qvk.sintab[i] = sinf(i * (2 * M_PI / 255));
3007 #if CL_RTX_SHADERBALLS
Com_EPrintf("destroy_vulkan failed\n");
Com_EPrintf("IMG_ReadPixels: unsupported swap chain format (%d)!\n", qvk.surf_format.format);
3041 VkImageSubresourceRange subresource_range = {
3042 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
3045 .baseArrayLayer = 0,
3050 .image = swap_chain_image,
3051 .subresourceRange = subresource_range,
3053 .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
3054 .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
3055 .newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
3060 .subresourceRange = subresource_range,
3061 .srcAccessMask = VK_ACCESS_HOST_READ_BIT,
3062 .dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
3063 .oldLayout = VK_IMAGE_LAYOUT_GENERAL,
3064 .newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
3067 VkImageCopy img_copy_region = {
3068 .srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 },
3069 .dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 },
3073 vkCmdCopyImage(cmd_buf,
3074 swap_chain_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3076 1, &img_copy_region);
3079 .image = swap_chain_image,
3080 .subresourceRange = subresource_range,
3081 .srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
3083 .oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3084 .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
3089 .subresourceRange = subresource_range,
3090 .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
3091 .dstAccessMask = VK_ACCESS_HOST_READ_BIT,
3092 .oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3093 .newLayout = VK_IMAGE_LAYOUT_GENERAL
3099 VkImageSubresource subresource = {
3100 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
3105 VkSubresourceLayout subresource_layout;
byte* src_row = (byte*)device_data + subresource_layout.rowPitch * row;
3123 dst_row[0] = src_row[2];
3124 dst_row[1] = src_row[1];
3125 dst_row[2] = src_row[0];
3135 dst_row[0] = src_row[0];
3136 dst_row[1] = src_row[1];
3137 dst_row[2] = src_row[2];
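/* The two branches above normalize the swapchain pixel order for the screenshot:
   BGRA sources get red and blue swapped, RGBA sources are copied through, so the
   output buffer is always RGB. */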
3157 char pathname[MAX_QPATH];
const char *suf[6] = { "ft", "bk", "up", "dn", "rt", "lf" };
3166 int avg_color[3] = { 0 };
3168 for (i = 0; i < 6; i++) {
Q_concat(pathname, sizeof(pathname), "env/", name, suf[i], ".tga", NULL);
if(img == R_NOTEXTURE) {
data = Z_Malloc(6 * sizeof(uint32_t));
for(int j = 0; j < 6; j++)
((uint32_t *)data)[j] = 0xff00ffffu;
3180 w_prev = h_prev = 1;
3184 size_t s = img->upload_width * img->upload_height * 4;
3186 data = Z_Malloc(s * 6);
3187 w_prev = img->upload_width;
3188 h_prev = img->upload_height;
3191 memcpy(data + s * i, img->pix_data, s);
for (int p = 0; p < img->upload_width * img->upload_height; p++)
3195 uint32_t pix = *((uint32_t*)img->pix_data + p);
3196 avg_color[0] += pix & 0xff;
3197 avg_color[1] += (pix >> 8) & 0xff;
3198 avg_color[2] += (pix >> 16) & 0xff;
3201 assert(w_prev == img->upload_width);
3202 assert(h_prev == img->upload_height);
3204 List_Remove(&img->entry);
memset(img, 0, sizeof(*img));
float inv_num_pixels = 1.0f / (w_prev * h_prev * 6);
(float)avg_color[0] * inv_num_pixels / 255.f,
(float)avg_color[1] * inv_num_pixels / 255.f,
(float)avg_color[2] * inv_num_pixels / 255.f
Com_Printf("loading %s\n", name);
3247 char bsp_path[MAX_QPATH];
Q_concat(bsp_path, sizeof(bsp_path), "maps/", name, ".bsp", NULL);
qerror_t ret = BSP_Load(bsp_path, &bsp);
m->world_transparent_count,
m->world_custom_sky_count));
VkCommandBuffer* new_buffers = Z_Mallocz(new_count * MAX_FRAMES_IN_FLIGHT * sizeof(VkCommandBuffer));
3315 VkCommandBufferAllocateInfo cmd_buf_alloc_info = {
3316 .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
3318 .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
3336 Z_Free(group->buffer_begin_addrs);
3337 group->buffer_begin_addrs = new_addrs;
3347 VkCommandBufferBeginInfo begin_info = {
3348 .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
3349 .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
3350 .pInheritanceInfo = NULL,
3352 _VK(vkResetCommandBuffer(cmd_buf, 0));
3353 _VK(vkBeginCommandBuffer(cmd_buf, &begin_info));
3359 #if (defined __GNUC__)
3360 *begin_addr = __builtin_return_address(0);
3361 #elif (defined _MSC_VER)
3362 *begin_addr = _ReturnAddress();
3384 Z_Free(group->buffer_begin_addrs);
3385 group->buffer_begin_addrs = NULL;
3407 vkQueueWaitIdle(queue);
3412 VkCommandBuffer cmd_buf,
3414 uint32_t execute_device_mask,
3415 int wait_semaphore_count,
3416 VkSemaphore* wait_semaphores,
3417 VkPipelineStageFlags* wait_stages,
3418 uint32_t* wait_device_indices,
3419 int signal_semaphore_count,
3420 VkSemaphore* signal_semaphores,
3421 uint32_t* signal_device_indices,
3424 _VK(vkEndCommandBuffer(cmd_buf));
3426 VkSubmitInfo submit_info = {
3427 .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
3428 .waitSemaphoreCount = wait_semaphore_count,
3429 .pWaitSemaphores = wait_semaphores,
3430 .pWaitDstStageMask = wait_stages,
3431 .signalSemaphoreCount = signal_semaphore_count,
3432 .pSignalSemaphores = signal_semaphores,
3433 .commandBufferCount = 1,
3434 .pCommandBuffers = &cmd_buf,
3437 #ifdef VKPT_DEVICE_GROUPS
3438 VkDeviceGroupSubmitInfoKHR device_group_submit_info = {
3439 .sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
3441 .waitSemaphoreCount = wait_semaphore_count,
3442 .pWaitSemaphoreDeviceIndices = wait_device_indices,
3443 .commandBufferCount = 1,
3444 .pCommandBufferDeviceMasks = &execute_device_mask,
3445 .signalSemaphoreCount = signal_semaphore_count,
3446 .pSignalSemaphoreDeviceIndices = signal_device_indices,
3450 submit_info.pNext = &device_group_submit_info;
3454 _VK(vkQueueSubmit(queue, 1, &submit_info, fence));
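/* With VKPT_DEVICE_GROUPS enabled, the VkDeviceGroupSubmitInfoKHR chained above tells
   Vulkan which physical device in the group executes the command buffer
   (execute_device_mask) and which device index each wait/signal semaphore uses. */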
for (int ngroup = 0; ngroup < LENGTH(groups); ngroup++)
if (group->buffers[i] == cmd_buf)
group->buffer_begin_addrs[i] = NULL;
VkCommandBuffer cmd_buf,
vkpt_submit_command_buffer(cmd_buf, queue, all_gpus ? (1 << qvk.device_count) - 1 : 1, 0, NULL, NULL, NULL, 0, NULL, NULL, 0);
3482 #include <windows.h>
va_start(args, format);
vsnprintf(buffer, sizeof(buffer), format, args);
OutputDebugStringA(buffer);
fprintf(stderr, "%s", buffer);